/* * $Id$ * * SARL is an general-purpose agent programming language. * More details on http://www.sarl.io * * Copyright (C) 2014-2021 the original authors or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.sarl.lang.mwe2.codebuilder.fragments; import java.util.Iterator; import java.util.List; import java.util.regex.Pattern; import javax.inject.Inject; import com.google.common.collect.Iterators; import org.eclipse.emf.common.notify.Notifier; import org.eclipse.emf.common.util.URI; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.emf.ecore.resource.ResourceSet; import org.eclipse.xtend.core.xtend.XtendTypeDeclaration; import org.eclipse.xtend2.lib.StringConcatenationClient; import org.eclipse.xtext.AbstractRule; import org.eclipse.xtext.Assignment; import org.eclipse.xtext.EcoreUtil2; import org.eclipse.xtext.Keyword; import org.eclipse.xtext.common.types.JvmDeclaredType; import org.eclipse.xtext.common.types.JvmIdentifiableElement; import org.eclipse.xtext.common.types.JvmParameterizedTypeReference; import org.eclipse.xtext.common.types.JvmType; import org.eclipse.xtext.common.types.JvmTypeReference; import org.eclipse.xtext.common.types.access.IJvmTypeProvider; import org.eclipse.xtext.util.EmfFormatter; import org.eclipse.xtext.util.StringInputStream; import org.eclipse.xtext.util.Strings; import org.eclipse.xtext.xbase.XBooleanLiteral; import org.eclipse.xtext.xbase.XCastedExpression; import 
org.eclipse.xtext.xbase.XExpression;
import org.eclipse.xtext.xbase.XFeatureCall;
import org.eclipse.xtext.xbase.XNumberLiteral;
import org.eclipse.xtext.xbase.XbaseFactory;
import org.eclipse.xtext.xbase.lib.Procedures;
import org.eclipse.xtext.xbase.lib.Pure;
import org.eclipse.xtext.xtext.generator.model.GuiceModuleAccess.BindingFactory;
import org.eclipse.xtext.xtext.generator.model.JavaFileAccess;
import org.eclipse.xtext.xtext.generator.model.TypeReference;

import io.sarl.lang.mwe2.codebuilder.extractor.CodeElementExtractor;

/** Generator of the builder for XExpressions.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
public class ExpressionBuilderFragment extends AbstractSubCodeBuilderFragment {

	// Contributions that register factory methods on the builder factory;
	// injected by the MWE2 generator wiring.
	@Inject
	private BuilderFactoryContributions builderFactoryContributions;

	/** Replies the custom implementation for the expression builder.
	 *
	 * @return the custom implementation.
	 */
	@Pure
	public TypeReference getExpressionBuilderImplCustom() {
		return getCodeElementExtractor().getElementBuilderImplCustom("Expression"); //$NON-NLS-1$
	}

	@Override
	public void generate() {
		// Emit the builder interface and its default implementation.
		generateIExpressionBuilder();
		generateExpressionBuilderImpl();
		// The appender wrapper is only emitted when ISourceAppendable support is enabled
		// in the code-builder configuration.
		if (getCodeBuilderConfig().isISourceAppendableEnable()) {
			generateExpressionAppender();
		}
		generateBuilderFactoryContributions();
		super.generate();
	}

	@Override
	public void generateRuntimeBindings(BindingFactory factory) {
		super.generateRuntimeBindings(factory);
		// Bind the builder interface to its implementation; the "custom" type is
		// preferred by bindTypeReferences when it exists (see AbstractSubCodeBuilderFragment).
		bindTypeReferences(factory,
				getExpressionBuilderInterface(),
				getExpressionBuilderImpl(),
				getExpressionBuilderImplCustom());
	}

	/** Generate the expression builder interface.
	 */
	protected void generateIExpressionBuilder() {
		final TypeReference builder = getExpressionBuilderInterface();
		// Lazily emits the Java source of the generated interface.
		final StringConcatenationClient content = new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				it.append("/** Builder of a " + getLanguageName() //$NON-NLS-1$
						+ " XExpression."); //$NON-NLS-1$
				it.newLine();
				it.append(" */"); //$NON-NLS-1$
				it.newLine();
				it.append("@SuppressWarnings(\"all\")"); //$NON-NLS-1$
				it.newLine();
				it.append("public interface "); //$NON-NLS-1$
				it.append(builder.getSimpleName());
				it.append(" {"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// forInterface=true, forAppender=false: members are emitted as
				// abstract declarations.
				it.append(generateMembers(true, false));
				it.append("}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			}
		};
		final JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(builder, content);
		javaFile.writeTo(getSrcGen());
	}

	/** Generate the expression builder implementation.
	 */
	protected void generateExpressionBuilderImpl() {
		final TypeReference builderInterface = getExpressionBuilderInterface();
		final TypeReference builder = getExpressionBuilderImpl();
		// Lazily emits the Java source of the generated implementation class.
		final StringConcatenationClient content = new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				it.append("/** Builder of a " + getLanguageName() //$NON-NLS-1$
						+ " XExpression."); //$NON-NLS-1$
				it.newLine();
				it.append(" */"); //$NON-NLS-1$
				it.newLine();
				it.append("@SuppressWarnings(\"all\")"); //$NON-NLS-1$
				it.newLine();
				it.append("public class "); //$NON-NLS-1$
				it.append(builder.getSimpleName());
				it.append(" extends "); //$NON-NLS-1$
				it.append(getAbstractBuilderImpl());
				it.append(" implements "); //$NON-NLS-1$
				it.append(builderInterface);
				it.append(" {"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// forInterface=false, forAppender=false: members carry concrete bodies.
				it.append(generateMembers(false, false));
				it.append("}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			}
		};
		final JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(builder, content);
		javaFile.writeTo(getSrcGen());
	}
	/** Generate the expression appender.
	 */
	protected void generateExpressionAppender() {
		final TypeReference builderInterface = getExpressionBuilderInterface();
		final TypeReference appender = getCodeElementExtractor().getElementAppenderImpl("Expression"); //$NON-NLS-1$
		// Lazily emits the Java source of the appender class: a wrapper that
		// implements the builder interface by delegating to an inner builder.
		final StringConcatenationClient content = new StringConcatenationClient() {
			@Override
			protected void appendTo(TargetStringConcatenation it) {
				// NOTE(review): the generated class comment says "Builder of a ... XExpression"
				// even though this type is the appender — confirm whether that wording is intended.
				it.append("/** Builder of a " + getLanguageName() //$NON-NLS-1$
						+ " XExpression."); //$NON-NLS-1$
				it.newLine();
				it.append(" */"); //$NON-NLS-1$
				it.newLine();
				it.append("@SuppressWarnings(\"all\")"); //$NON-NLS-1$
				it.newLine();
				it.append("public class "); //$NON-NLS-1$
				it.append(appender.getSimpleName());
				it.append(" extends "); //$NON-NLS-1$
				it.append(getCodeElementExtractor().getAbstractAppenderImpl());
				it.append(" implements "); //$NON-NLS-1$
				it.append(builderInterface);
				it.append(" {"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
				// Delegation members use getXExpression() as the accessor of the
				// wrapped element.
				it.append(generateAppenderMembers(appender.getSimpleName(), builderInterface, "getXExpression()")); //$NON-NLS-1$
				// forInterface=false, forAppender=true: members forward to this.builder.
				it.append(generateMembers(false, true));
				it.append("}"); //$NON-NLS-1$
				it.newLineIfNotEmpty();
				it.newLine();
			}
		};
		final JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(appender, content);
		javaFile.writeTo(getSrcGen());
	}

	/** Generate the members of the builder.
	 *
	 * @param forInterface <code>true</code> if the code must be generated for an interface.
	 * @param forAppender <code>true</code> if the code must be generated for an appender.
	 * @return the code.
*/ @SuppressWarnings("checkstyle:all") protected StringConcatenationClient generateMembers(boolean forInterface, boolean forAppender) { final ExpressionContextDescription expressionContext = getExpressionContextDescription(); return new StringConcatenationClient() { @Override protected void appendTo(TargetStringConcatenation it) { if (!forInterface && !forAppender) { it.append("\tprivate "); //$NON-NLS-1$ it.append(EObject.class); it.append(" context;"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); it.append("\tprivate "); //$NON-NLS-1$ it.append(Procedures.Procedure1.class); it.append("<? super "); //$NON-NLS-1$ it.append(XExpression.class); it.append("> setter;"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); it.append("\tprivate "); //$NON-NLS-1$ it.append(XExpression.class); it.append(" expr;"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); } else { it.append("\t/** Find the reference to the type with the given name."); //$NON-NLS-1$ it.newLine(); it.append("\t * @param typeName the fully qualified name of the type"); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the type reference."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t"); //$NON-NLS-1$ if (!forInterface) { it.append("public "); //$NON-NLS-1$ } it.append(JvmParameterizedTypeReference.class); it.append(" newTypeRef(String typeName)"); //$NON-NLS-1$ if (forInterface) { it.append(";"); //$NON-NLS-1$ } else { it.append(" {"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn this.builder.newTypeRef(typeName);"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ } it.newLineIfNotEmpty(); it.newLine(); it.append("\t/** Find the reference to the type with the given name."); //$NON-NLS-1$ it.newLine(); it.append("\t * @param context the context for the type reference use"); //$NON-NLS-1$ it.newLine(); it.append("\t * @param typeName the fully qualified name of the type"); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the 
type reference."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t"); //$NON-NLS-1$ if (!forInterface) { it.append("public "); //$NON-NLS-1$ } it.append(JvmParameterizedTypeReference.class); it.append(" newTypeRef("); //$NON-NLS-1$ it.append(Notifier.class); it.append(" context, String typeName)"); //$NON-NLS-1$ if (forInterface) { it.append(";"); //$NON-NLS-1$ } else { it.append(" {"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn this.builder.newTypeRef(context, typeName);"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ } it.newLineIfNotEmpty(); it.newLine(); } if (forInterface) { it.append("\t/** Replies the context for type resolution."); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the context or {@code null} if the Ecore object is the context."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t"); //$NON-NLS-1$ it.append(IJvmTypeProvider.class); it.append(" getTypeResolutionContext();"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); } else if (forAppender) { it.append("\tpublic "); //$NON-NLS-1$ it.append(IJvmTypeProvider.class); it.append(" getTypeResolutionContext() {"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn this.builder.getTypeResolutionContext();"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); } it.append("\t/** Initialize the expression."); //$NON-NLS-1$ it.newLine(); it.append("\t * @param context the context of the expressions."); //$NON-NLS-1$ it.newLine(); it.append("\t * @param setter the object that permits to assign the expression to the context."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t"); //$NON-NLS-1$ if (!forInterface) { it.append("public "); //$NON-NLS-1$ } it.append("void eInit("); //$NON-NLS-1$ it.append(EObject.class); it.append(" context, "); //$NON-NLS-1$ it.append(Procedures.Procedure1.class); it.append("<? 
super "); //$NON-NLS-1$ it.append(XExpression.class); it.append("> setter, "); //$NON-NLS-1$ it.append(IJvmTypeProvider.class); it.append(" typeContext)"); //$NON-NLS-1$ if (forInterface) { it.append(";"); //$NON-NLS-1$ } else { it.append(" {"); //$NON-NLS-1$ it.newLine(); if (forAppender) { it.append("\t\tthis.builder.eInit(context, setter, typeContext);"); //$NON-NLS-1$ } else { it.append("\t\tsetTypeResolutionContext(typeContext);"); //$NON-NLS-1$ it.newLine(); it.append("\t\tthis.context = context;"); //$NON-NLS-1$ it.newLine(); it.append("\t\tthis.setter = setter;"); //$NON-NLS-1$ it.newLine(); it.append("\t\tthis.expr = null;"); //$NON-NLS-1$ } it.newLine(); it.append("\t}"); //$NON-NLS-1$ } it.newLineIfNotEmpty(); it.newLine(); it.append("\t/** Replies the last created expression."); //$NON-NLS-1$ it.newLine(); it.append("\t *"); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the last created expression."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t@"); //$NON-NLS-1$ it.append(Pure.class); it.newLine(); it.append("\t"); //$NON-NLS-1$ if (!forInterface) { it.append("public "); //$NON-NLS-1$ } it.append(XExpression.class); it.append(" getXExpression()"); //$NON-NLS-1$ if (forInterface) { it.append(";"); //$NON-NLS-1$ } else { it.append(" {"); //$NON-NLS-1$ it.newLine(); if (forAppender) { it.append("\t\treturn this.builder.getXExpression();"); //$NON-NLS-1$ } else { it.append("\t\treturn this.expr;"); //$NON-NLS-1$ } it.newLine(); it.append("\t}"); //$NON-NLS-1$ } it.newLineIfNotEmpty(); it.newLine(); it.append("\t/** Replies the resource to which the XExpression is attached."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t@"); //$NON-NLS-1$ it.append(Pure.class); it.newLine(); it.append("\t"); //$NON-NLS-1$ if (!forInterface) { it.append("public "); //$NON-NLS-1$ } it.append(Resource.class); it.append(" eResource()"); //$NON-NLS-1$ if (forInterface) { 
it.append(";"); //$NON-NLS-1$ } else { it.append(" {"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn getXExpression().eResource();"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ } it.newLineIfNotEmpty(); it.newLine(); it.append("\t/** Change the expression in the container."); //$NON-NLS-1$ it.newLine(); it.append("\t *"); //$NON-NLS-1$ it.newLine(); it.append("\t * @param expression the textual representation of the expression."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t"); //$NON-NLS-1$ if (!forInterface) { it.append("public "); //$NON-NLS-1$ } it.append("void setExpression(String expression)"); //$NON-NLS-1$ if (forInterface) { it.append(";"); //$NON-NLS-1$ } else { it.append(" {"); //$NON-NLS-1$ it.newLine(); if (forAppender) { it.append("\t\tthis.builder.setExpression(expression);"); //$NON-NLS-1$ } else { it.append("\t\tthis.expr = fromString(expression);"); //$NON-NLS-1$ it.newLine(); it.append("\t\tthis.setter.apply(this.expr);"); //$NON-NLS-1$ } it.newLine(); it.append("\t}"); //$NON-NLS-1$ } it.newLineIfNotEmpty(); it.newLine(); it.append("\t/** Change the expression in the container."); //$NON-NLS-1$ it.newLine(); it.append("\t *"); //$NON-NLS-1$ it.newLine(); it.append("\t * @param expression the expression."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t"); //$NON-NLS-1$ if (!forInterface) { it.append("public "); //$NON-NLS-1$ } it.append("void setXExpression("); //$NON-NLS-1$ it.append(XExpression.class); it.append(" expression)"); //$NON-NLS-1$ if (forInterface) { it.append(";"); //$NON-NLS-1$ } else { it.append(" {"); //$NON-NLS-1$ it.newLine(); if (forAppender) { it.append("\t\tthis.builder.setXExpression(expression);"); //$NON-NLS-1$ } else { it.append("\t\tthis.expr = expression;"); //$NON-NLS-1$ it.newLine(); it.append("\t\tthis.setter.apply(this.expr);"); //$NON-NLS-1$ } it.newLine(); it.append("\t}"); //$NON-NLS-1$ } 
it.newLineIfNotEmpty(); it.newLine(); if (!forInterface && !forAppender) { it.append("\t/** Generate a piece of "); //$NON-NLS-1$ it.append(getLanguageName()); it.append(" code that permits to compile an XExpression."); //$NON-NLS-1$ it.newLine(); it.append("\t *"); //$NON-NLS-1$ it.newLine(); it.append("\t * @param expression the expression to compile."); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the "); //$NON-NLS-1$ it.append(getLanguageName()); it.append(" code."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\tstatic String generateExpressionCode(String expression) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn \""); //$NON-NLS-1$ it.append(expressionContext.getContainerKeyword()); it.append(" ____synthesis { "); //$NON-NLS-1$ it.append(expressionContext.getFieldDeclarationKeyword()); it.append(" ____fakefield = \" + expression + \" }\";"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); it.append("\tstatic String generateTypenameCode(String typeName) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn \""); //$NON-NLS-1$ it.append(expressionContext.getContainerKeyword()); it.append(" ____synthesis { "); //$NON-NLS-1$ it.append(expressionContext.getFieldDeclarationKeyword()); it.append(" ____fakefield : \" + typeName + \" }\";"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); it.append("\tstatic "); //$NON-NLS-1$ it.append(JvmParameterizedTypeReference.class); it.append(" parseType("); //$NON-NLS-1$ it.append(Notifier.class); it.append(" context, String typeName, "); //$NON-NLS-1$ it.append(getAbstractBuilderImpl()); it.append(" caller) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t"); //$NON-NLS-1$ it.append(ResourceSet.class); it.append(" resourceSet = toResource(context).getResourceSet();"); //$NON-NLS-1$ it.newLine(); it.append("\t\t"); //$NON-NLS-1$ it.append(URI.class); it.append(" uri = 
caller.computeUnusedUri(resourceSet);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t"); //$NON-NLS-1$ it.append(Resource.class); it.append(" resource = caller.getResourceFactory().createResource(uri);"); //$NON-NLS-1$ it.newLine(); it.append("\t\tresourceSet.getResources().add(resource);"); //$NON-NLS-1$ it.newLine(); it.append("\t\ttry ("); //$NON-NLS-1$ it.append(StringInputStream.class); it.append(" is = new "); //$NON-NLS-1$ it.append(StringInputStream.class); it.append("(generateTypenameCode(typeName))) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tresource.load(is, null);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t"); //$NON-NLS-1$ it.append(getCodeElementExtractor().getLanguageScriptInterface()); it.append(" script = resource.getContents().isEmpty() ? null : ("); //$NON-NLS-1$ it.append(getCodeElementExtractor().getLanguageScriptInterface()); it.append(") resource.getContents().get(0);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t"); //$NON-NLS-1$ it.append(expressionContext.getContainerDescription().getElementType()); it.append(" topElement = ("); //$NON-NLS-1$ it.append(expressionContext.getContainerDescription().getElementType()); it.append(") script."); //$NON-NLS-1$ it.append(getLanguageScriptMemberGetter()); it.append("().get(0);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t"); //$NON-NLS-1$ it.append(expressionContext.getMemberDescription().getElementType()); it.append(" member = ("); //$NON-NLS-1$ it.append(expressionContext.getMemberDescription().getElementType()); it.append(") topElement.get"); //$NON-NLS-1$ it.append(Strings.toFirstUpper(getCodeBuilderConfig().getMemberCollectionExtensionGrammarName())); it.append("().get(0);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t"); //$NON-NLS-1$ it.append(JvmTypeReference.class); it.append(" reference = member.getType();"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tif (reference instanceof "); //$NON-NLS-1$ it.append(JvmParameterizedTypeReference.class); it.append(") {"); 
//$NON-NLS-1$ it.newLine(); it.append("\t\t\t\tfinal "); //$NON-NLS-1$ it.append(JvmParameterizedTypeReference.class); it.append(" pref = ("); //$NON-NLS-1$ it.append(JvmParameterizedTypeReference.class); it.append(") reference;"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\tif (!pref.getArguments().isEmpty()) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\t\t"); //$NON-NLS-1$ it.append(EcoreUtil2.class); it.append(".resolveAll(resource);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\t\treturn pref;"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\t}"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t}"); //$NON-NLS-1$ it.newLine(); it.append("\t\t} catch ("); //$NON-NLS-1$ it.append(Exception.class); it.append(" exception) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tthrow new "); //$NON-NLS-1$ it.append(TypeNotPresentException.class); it.append("(typeName, exception);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t} finally {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tresourceSet.getResources().remove(resource);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t}"); //$NON-NLS-1$ it.newLine(); it.append("\t\tthrow new "); //$NON-NLS-1$ it.append(TypeNotPresentException.class); it.append("(typeName, null);"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); it.append("\t/** Create an expression but does not change the container."); //$NON-NLS-1$ it.newLine(); it.append("\t *"); //$NON-NLS-1$ it.newLine(); it.append("\t * @param expression the textual representation of the expression."); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the expression."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t@"); //$NON-NLS-1$ it.append(Pure.class); it.newLine(); it.append("\tprotected "); //$NON-NLS-1$ it.append(XExpression.class); it.append(" fromString(String expression)"); //$NON-NLS-1$ it.append(" {"); //$NON-NLS-1$ it.newLine(); it.append("\t\tif (!"); 
//$NON-NLS-1$ it.append(Strings.class); it.append(".isEmpty(expression)) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t"); //$NON-NLS-1$ it.append(ResourceSet.class); it.append(" resourceSet = this.context.eResource().getResourceSet();"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t"); //$NON-NLS-1$ it.append(URI.class); it.append(" uri = computeUnusedUri(resourceSet);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t"); //$NON-NLS-1$ it.append(Resource.class); it.append(" resource = getResourceFactory().createResource(uri);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tresourceSet.getResources().add(resource);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\ttry ("); //$NON-NLS-1$ it.append(StringInputStream.class); it.append(" is = new "); //$NON-NLS-1$ it.append(StringInputStream.class); it.append("(generateExpressionCode(expression))) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\tresource.load(is, null);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\t"); //$NON-NLS-1$ it.append(getCodeElementExtractor().getLanguageScriptInterface()); it.append(" script = resource.getContents().isEmpty() ? 
null : ("); //$NON-NLS-1$ it.append(getCodeElementExtractor().getLanguageScriptInterface()); it.append(") resource.getContents().get(0);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\t"); //$NON-NLS-1$ it.append(expressionContext.getContainerDescription().getElementType()); it.append(" topElement = ("); //$NON-NLS-1$ it.append(expressionContext.getContainerDescription().getElementType()); it.append(") script."); //$NON-NLS-1$ it.append(getLanguageScriptMemberGetter()); it.append("().get(0);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\t"); //$NON-NLS-1$ it.append(expressionContext.getMemberDescription().getElementType()); it.append(" member = ("); //$NON-NLS-1$ it.append(expressionContext.getMemberDescription().getElementType()); it.append(") topElement.get"); //$NON-NLS-1$ it.append(Strings.toFirstUpper(getCodeBuilderConfig().getMemberCollectionExtensionGrammarName())); it.append("().get(0);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\treturn member.get"); //$NON-NLS-1$ it.append(Strings.toFirstUpper(expressionContext.getExpressionAssignment().getFeature())); it.append("();"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t} catch (Throwable exception) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\tthrow new RuntimeException(exception);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t} finally {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\tresourceSet.getResources().remove(resource);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t}"); //$NON-NLS-1$ it.newLine(); it.append("\t\t}"); //$NON-NLS-1$ it.newLine(); it.append("\t\tthrow new IllegalArgumentException(\"not a valid expression\");"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ } it.newLineIfNotEmpty(); it.newLine(); it.append("\t/** Replies the XExpression for the default value associated to the given type."); //$NON-NLS-1$ it.newLine(); it.append("\t * @param type the type for which the default value should be determined."); //$NON-NLS-1$ it.newLine(); it.append("\t * 
@return the default value."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\t@"); //$NON-NLS-1$ it.append(Pure.class); it.newLine(); it.append("\t"); //$NON-NLS-1$ if (!forInterface) { it.append("public "); //$NON-NLS-1$ } it.append(XExpression.class); it.append(" getDefaultXExpressionForType(String type)"); //$NON-NLS-1$ if (forInterface) { it.append(";"); //$NON-NLS-1$ } else { it.append(" {"); //$NON-NLS-1$ it.newLine(); if (forAppender) { it.append("\t\treturn this.builder.getDefaultXExpressionForType(type);"); //$NON-NLS-1$ } else { it.append("\t\t//TODO: Check if a similar function exists in the Xbase library."); //$NON-NLS-1$ it.newLine(); it.append("\t\t"); //$NON-NLS-1$ it.append(XExpression.class); it.append(" expr = null;"); //$NON-NLS-1$ it.newLine(); it.append("\t\tif (type != null && !\"void\".equals(type) && !Void.class.getName().equals(type)) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tswitch (type) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tcase \"boolean\":"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tcase \"java.lang.Boolean\":"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\t"); //$NON-NLS-1$ it.append(XBooleanLiteral.class); it.append(" booleanLiteral = "); //$NON-NLS-1$ it.append(XbaseFactory.class); it.append(".eINSTANCE.createXBooleanLiteral();"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\tbooleanLiteral.setIsTrue(false);"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\texpr = booleanLiteral;"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\tbreak;"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tcase \"float\":"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\tcase \"java.lang.Float\":"); //$NON-NLS-1$ it.newLine(); it.append("\t\t\t\t"); //$NON-NLS-1$ it.append(XNumberLiteral.class); it.append(" numberLiteral = "); //$NON-NLS-1$ it.append(XbaseFactory.class); it.append(".eINSTANCE.createXNumberLiteral();"); //$NON-NLS-1$ it.newLine(); 
// Remainder of the generated "getDefaultXExpressionForType" switch: each case
// builds the Xbase expression that represents the default value of the type.
it.append("\t\t\t\tnumberLiteral.setValue(\"0.0f\");"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\texpr = numberLiteral;"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
it.newLine();
// Double-precision types default to 0.0.
it.append("\t\t\tcase \"double\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"java.lang.Double\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"java.lang.BigDecimal\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tnumberLiteral = "); //$NON-NLS-1$
it.append(XbaseFactory.class);
it.append(".eINSTANCE.createXNumberLiteral();"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tnumberLiteral.setValue(\"0.0\");"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\texpr = numberLiteral;"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
it.newLine();
// Integer types default to 0.
it.append("\t\t\tcase \"int\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"long\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"java.lang.Integer\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"java.lang.Long\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"java.lang.BigInteger\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tnumberLiteral = "); //$NON-NLS-1$
it.append(XbaseFactory.class);
it.append(".eINSTANCE.createXNumberLiteral();"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tnumberLiteral.setValue(\"0\");"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\texpr = numberLiteral;"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
it.newLine();
// Small integer types need an explicit cast of the 0 literal to the target type.
it.append("\t\t\tcase \"byte\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"short\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"char\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"java.lang.Byte\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"java.lang.Short\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\tcase \"java.lang.Character\":"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tnumberLiteral = "); //$NON-NLS-1$
it.append(XbaseFactory.class);
it.append(".eINSTANCE.createXNumberLiteral();"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tnumberLiteral.setValue(\"0\");"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\t"); //$NON-NLS-1$
it.append(XCastedExpression.class);
it.append(" castExpression = "); //$NON-NLS-1$
it.append(XbaseFactory.class);
it.append(".eINSTANCE.createXCastedExpression();"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tcastExpression.setTarget(numberLiteral);"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tcastExpression.setType(newTypeRef(this.context, type));"); //$NON-NLS-1$
it.newLine();
// FIX: the generated code must return the cast expression, not the bare
// number literal; previously "expr = numberLiteral;" was emitted, which left
// castExpression unused and dropped the byte/short/char cast entirely.
it.append("\t\t\t\texpr = castExpression;"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
it.newLine();
// Any other type defaults to the null literal.
it.append("\t\t\tdefault:"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\texpr = "); //$NON-NLS-1$
it.append(XbaseFactory.class);
it.append(".eINSTANCE.createXNullLiteral();"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
it.newLine();
it.append("\t\t\t}"); //$NON-NLS-1$
it.newLine();
it.append("\t\t}"); //$NON-NLS-1$
it.newLine();
it.append("\t\treturn expr;"); //$NON-NLS-1$
}
it.newLine();
it.append("\t}"); //$NON-NLS-1$
}
it.newLineIfNotEmpty();
it.newLine();
// Generate getDefaultValueForType(String): the textual (source-level) default value.
it.append("\t/** Replies the default value for the given type."); //$NON-NLS-1$
it.newLine();
it.append("\t * @param type the type for which the default value should be determined."); //$NON-NLS-1$
it.newLine();
it.append("\t * @return the default value."); //$NON-NLS-1$
it.newLine();
it.append("\t */"); //$NON-NLS-1$
it.newLine();
it.append("\t@"); //$NON-NLS-1$
it.append(Pure.class);
it.newLine();
it.append("\t"); //$NON-NLS-1$
if (!forInterface) {
	it.append("public "); //$NON-NLS-1$
}
it.append("String getDefaultValueForType(String type)"); //$NON-NLS-1$
if (forInterface) {
	it.append(";"); //$NON-NLS-1$
} else {
	it.append(" {"); //$NON-NLS-1$
	it.newLine();
	if (forAppender) {
		// The appender simply delegates to the wrapped builder.
		it.append("\t\treturn this.builder.getDefaultValueForType(type);"); //$NON-NLS-1$
	} else {
		it.append("\t\t//TODO: Check if a similar function exists in the Xbase library."); //$NON-NLS-1$
		it.newLine();
		it.append("\t\tString defaultValue = \"\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\tif (!"); //$NON-NLS-1$
		it.append(Strings.class);
		it.append(".isEmpty(type) && !\"void\".equals(type)) {"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tswitch (type) {"); //$NON-NLS-1$
		it.newLine();
		// NOTE(review): the textual default for boolean is "true" here, although
		// Java's default boolean value is false — confirm this asymmetry is intended.
		it.append("\t\t\tcase \"boolean\":"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tdefaultValue = \"true\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tcase \"double\":"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tdefaultValue = \"0.0\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tcase \"float\":"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tdefaultValue = \"0.0f\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tcase \"int\":"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tdefaultValue = \"0\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tcase \"long\":"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tdefaultValue = \"0\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tcase \"byte\":"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tdefaultValue = \"(0 as byte)\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tcase \"short\":"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tdefaultValue = \"(0 as short)\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tcase \"char\":"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tdefaultValue = \"(0 as char)\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tdefault:"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tdefaultValue = \"null\";"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tbreak;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t}"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t}"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\treturn defaultValue;"); //$NON-NLS-1$
	}
	it.newLine();
	it.append("\t}"); //$NON-NLS-1$
}
it.newLineIfNotEmpty();
it.newLine();
// Generate the standard comment functions and, for implementations, toString().
it.append(generateStandardCommentFunctions(forInterface, forAppender, "getXExpression()")); //$NON-NLS-1$
if (!forInterface) {
	it.append("\t@"); //$NON-NLS-1$
	it.append(Override.class);
	it.newLine();
	it.append("\t@"); //$NON-NLS-1$
	it.append(Pure.class);
	it.newLine();
	it.append("\tpublic "); //$NON-NLS-1$
	it.append(String.class);
	it.append(" toString() {"); //$NON-NLS-1$
	it.newLine();
	it.append("\t\treturn "); //$NON-NLS-1$
	if (forAppender) {
		it.append("this.builder.toString();"); //$NON-NLS-1$
	} else {
		it.append(EmfFormatter.class);
		it.append(".objToStr(getXExpression());"); //$NON-NLS-1$
	}
	it.newLine();
	it.append("\t}"); //$NON-NLS-1$
	it.newLineIfNotEmpty();
	it.newLine();
}
// Generate createReferenceToThis(): an XFeatureCall on the enclosing type's JVM element.
it.append("\t/** Create a reference to \"this\" object or to the current type."); //$NON-NLS-1$
it.newLine();
it.append("\t *"); //$NON-NLS-1$
it.newLine();
it.append("\t * @return the reference."); //$NON-NLS-1$
it.newLine();
it.append("\t */"); //$NON-NLS-1$
it.newLine();
it.append("\t"); //$NON-NLS-1$
if (!forInterface) {
	it.append("public "); //$NON-NLS-1$
}
it.append(XFeatureCall.class);
it.append(" createReferenceToThis()"); //$NON-NLS-1$
if (forInterface) {
	it.append(";"); //$NON-NLS-1$
} else {
	it.append(" {"); //$NON-NLS-1$
	it.newLine();
	it.append("\t\t"); //$NON-NLS-1$
	if (forAppender) {
		it.append("return this.builder.createReferenceToThis();"); //$NON-NLS-1$
	} else {
		it.append("final "); //$NON-NLS-1$
		it.append(XExpression.class);
		it.append(" expr = getXExpression();"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t"); //$NON-NLS-1$
		it.append(XtendTypeDeclaration.class);
		it.append(" type = "); //$NON-NLS-1$
		it.append(EcoreUtil2.class);
		it.append(".getContainerOfType(expr, "); //$NON-NLS-1$
		it.append(XtendTypeDeclaration.class);
		it.append(".class);"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t"); //$NON-NLS-1$
		it.append(JvmType.class);
		it.append(" jvmObject = getAssociatedElement("); //$NON-NLS-1$
		it.append(JvmType.class);
		it.append(".class, type, expr.eResource());"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\tfinal "); //$NON-NLS-1$
		it.append(XFeatureCall.class);
		it.append(" thisFeature = "); //$NON-NLS-1$
		it.append(XbaseFactory.class);
		it.append(".eINSTANCE.createXFeatureCall();"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\tthisFeature.setFeature(jvmObject);"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\treturn thisFeature;"); //$NON-NLS-1$
	}
	it.newLine();
	it.append("\t}"); //$NON-NLS-1$
}
it.newLineIfNotEmpty();
it.newLine();
// Generate createReferenceToSuper(): resolves the extended class of the enclosing type.
it.append("\t/** Create a reference to \"super\" object or to the super type."); //$NON-NLS-1$
it.newLine();
it.append("\t *"); //$NON-NLS-1$
it.newLine();
it.append("\t * @return the reference."); //$NON-NLS-1$
it.newLine();
it.append("\t */"); //$NON-NLS-1$
it.newLine();
it.append("\t"); //$NON-NLS-1$
if (!forInterface) {
	it.append("public "); //$NON-NLS-1$
}
it.append(XFeatureCall.class);
it.append(" createReferenceToSuper()"); //$NON-NLS-1$
if (forInterface) {
	it.append(";"); //$NON-NLS-1$
} else {
	it.append(" {"); //$NON-NLS-1$
	it.newLine();
	it.append("\t\t"); //$NON-NLS-1$
	if (forAppender) {
		it.append("return this.builder.createReferenceToSuper();"); //$NON-NLS-1$
	} else {
		it.append("final "); //$NON-NLS-1$
		it.append(XExpression.class);
		it.append(" expr = getXExpression();"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t"); //$NON-NLS-1$
		it.append(XtendTypeDeclaration.class);
		it.append(" type = "); //$NON-NLS-1$
		it.append(EcoreUtil2.class);
		it.append(".getContainerOfType(expr, "); //$NON-NLS-1$
		it.append(XtendTypeDeclaration.class);
		it.append(".class);"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t"); //$NON-NLS-1$
		it.append(JvmType.class);
		it.append(" jvmObject = getAssociatedElement("); //$NON-NLS-1$
		it.append(JvmType.class);
		it.append(".class, type, expr.eResource());"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\tfinal "); //$NON-NLS-1$
		it.append(XFeatureCall.class);
		it.append(" superFeature = "); //$NON-NLS-1$
		it.append(XbaseFactory.class);
		it.append(".eINSTANCE.createXFeatureCall();"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t"); //$NON-NLS-1$
		it.append(JvmIdentifiableElement.class);
		it.append(" feature;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\tif (jvmObject instanceof "); //$NON-NLS-1$
		it.append(JvmDeclaredType.class);
		it.append(") {"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tfeature = (("); //$NON-NLS-1$
		it.append(JvmDeclaredType.class);
		it.append(") jvmObject).getExtendedClass().getType();"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t} else {"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tfeature = findType(expr, getQualifiedName(type)).getType();"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\tif (feature instanceof "); //$NON-NLS-1$
		it.append(JvmDeclaredType.class);
		it.append(") {"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tfeature = (("); //$NON-NLS-1$
		it.append(JvmDeclaredType.class);
		it.append(") feature).getExtendedClass().getType();"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t} else {"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t\tfeature = null;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\t}"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t}"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\tif (feature == null) {"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t\treturn null;"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\t}"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\tsuperFeature.setFeature(feature);"); //$NON-NLS-1$
		it.newLine();
		it.append("\t\treturn superFeature;"); //$NON-NLS-1$
	}
	// FIX(consistency): removed a stray newLineIfNotEmpty()/newLine() pair that
	// injected a blank line before the closing brace of the generated method;
	// the emission now matches createReferenceToThis() above.
	it.newLine();
	it.append("\t}"); //$NON-NLS-1$
}
it.newLineIfNotEmpty();
it.newLine();
// Generate dispose(): declared on the interface, delegated by the appender.
if (forInterface) {
	it.append("\t/** Dispose the resource."); //$NON-NLS-1$
	it.newLine();
	it.append("\t */"); //$NON-NLS-1$
	it.newLine();
	it.append("\tvoid dispose();"); //$NON-NLS-1$
	it.newLineIfNotEmpty();
	it.newLine();
} else if (forAppender) {
	it.append("\t/** Dispose the resource."); //$NON-NLS-1$
	it.newLine();
	it.append("\t */"); //$NON-NLS-1$
	it.newLine();
	it.append("\tpublic void dispose() {"); //$NON-NLS-1$
	it.newLine();
	it.append("\t\tthis.builder.dispose();"); //$NON-NLS-1$
	it.newLine();
	it.append("\t}"); //$NON-NLS-1$
	it.newLineIfNotEmpty();
	it.newLine();
}
}
};
}

/** Replies a keyword for declaring a container.
 *
 * @param grammarContainer the container description.
 * @return the keyword, never {@code null} nor an empty string.
 */
protected String ensureContainerKeyword(EObject grammarContainer) {
	// Use the first keyword literally present in the grammar element, if any.
	final Iterator<Keyword> iterator = Iterators.filter(grammarContainer.eContents().iterator(), Keyword.class);
	if (iterator.hasNext()) {
		return iterator.next().getValue();
	}
	// Otherwise fall back to the configured container declaration keyword.
	return getExpressionConfig().getFieldContainerDeclarationKeyword();
}

/** Replies a keyword for declaring a field.
 *
 * @param memberDescription the member description.
 * @return the keyword, never {@code null} nor an empty string.
 */
protected String ensureFieldDeclarationKeyword(CodeElementExtractor.ElementDescription memberDescription) {
	// Prefer the first modifier configured for this member kind.
	final List<String> modifiers = getCodeBuilderConfig().getModifiers().get(memberDescription.getName());
	if (modifiers != null && !modifiers.isEmpty()) {
		return modifiers.get(0);
	}
	return getExpressionConfig().getFieldDeclarationKeyword();
}

/** Replies the description of the expression context.
 *
 * @return the description.
*/
protected ExpressionContextDescription getExpressionContextDescription() {
	// Scan every top-level grammar element; the first member whose classifier
	// name matches the configured expression-field pattern provides the context.
	for (final CodeElementExtractor.ElementDescription containerDescription : getCodeElementExtractor().getTopElements(
			getGrammar(), getCodeBuilderConfig())) {
		final AbstractRule rule = getMemberRule(containerDescription);
		if (rule != null) {
			// Pattern that recognizes the type name of an expression-holding field.
			final Pattern fieldTypePattern = Pattern.compile(getExpressionConfig().getExpressionFieldTypenamePattern());
			final ExpressionContextDescription description = getCodeElementExtractor().visitMemberElements(
					containerDescription, rule, null,
					(it, grammarContainer, memberContainer, classifier) -> {
						if (fieldTypePattern.matcher(classifier.getName()).find()) {
							// Grammar assignment that holds the expression inside the member.
							final Assignment expressionAssignment = findAssignmentFromTerminalPattern(
									memberContainer,
									getExpressionConfig().getExpressionGrammarPattern());
							final CodeElementExtractor.ElementDescription memberDescription = it.newElementDescription(
									classifier.getName(), memberContainer, classifier, XExpression.class);
							return new ExpressionContextDescription(
									containerDescription,
									memberDescription,
									ensureContainerKeyword(containerDescription.getGrammarComponent()),
									ensureFieldDeclarationKeyword(memberDescription),
									expressionAssignment);
						}
						// Not an expression member: continue the visit.
						return null;
					}, null);
			if (description != null) {
				return description;
			}
		}
	}
	// No expression context could be derived from the grammar.
	return null;
}

/** Generate the contributions for the BuildFactory.
*/ protected void generateBuilderFactoryContributions() { final ExpressionContextDescription expressionContext = getExpressionContextDescription(); final String createFunctionName = "createXExpression"; //$NON-NLS-1$ this.builderFactoryContributions.addContribution(new StringConcatenationClient() { @Override protected void appendTo(TargetStringConcatenation it) { it.append("\t/** Create the factory for a " + getLanguageName() //$NON-NLS-1$ + " XExpression."); //$NON-NLS-1$ it.newLine(); it.append("\t * @param resourceSet the set of the resources that must be used for"); //$NON-NLS-1$ it.newLine(); it.append("\t * containing the generated resource, and resolving types from names."); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the factory."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\tpublic "); //$NON-NLS-1$ it.append(getExpressionBuilderInterface()); it.append(" "); //$NON-NLS-1$ it.append(createFunctionName); it.append("("); //$NON-NLS-1$ it.append(ResourceSet.class); it.append(" resourceSet) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn "); //$NON-NLS-1$ it.append(createFunctionName); it.append("(createResource(resourceSet));"); //$NON-NLS-1$ it.append("\t}"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); it.append("\t/** Create the factory for a " + getLanguageName() //$NON-NLS-1$ + " XExpression."); //$NON-NLS-1$ it.newLine(); it.append("\t * @param resource the resource that must be used for"); //$NON-NLS-1$ it.newLine(); it.append("\t * containing the generated resource, and resolving types from names."); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the factory."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\tpublic "); //$NON-NLS-1$ it.append(getExpressionBuilderInterface()); it.append(" "); //$NON-NLS-1$ it.append(createFunctionName); it.append("("); //$NON-NLS-1$ it.append(Resource.class); it.append(" resource) {"); //$NON-NLS-1$ 
it.newLine(); it.append("\t\tfinal "); //$NON-NLS-1$ it.append(getScriptBuilderInterface()); it.append(" script = createScript(getFooPackageName(), resource);"); //$NON-NLS-1$ it.newLine(); it.append("\t\tfinal "); //$NON-NLS-1$ it.append(expressionContext.getContainerDescription().getBuilderInterfaceType()); it.append(" topElement = script.add"); //$NON-NLS-1$ it.append(Strings.toFirstUpper(expressionContext.getContainerDescription().getElementType().getSimpleName())); it.append("(getFooTypeName());"); //$NON-NLS-1$ it.newLine(); it.append("\t\tfinal "); //$NON-NLS-1$ it.append(expressionContext.getMemberDescription().getBuilderInterfaceType()); it.append(" memberElement = topElement.add"); //$NON-NLS-1$ it.append(Strings.toFirstUpper(expressionContext.getMemberDescription().getElementType().getSimpleName())); it.append("(getFooMemberName());"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn memberElement.get"); //$NON-NLS-1$ it.append(Strings.toFirstUpper(expressionContext.getExpressionAssignment().getFeature())); it.append("();"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); } }); if (getCodeBuilderConfig().isISourceAppendableEnable()) { final String buildFunctionName = "buildXExpression"; //$NON-NLS-1$ final TypeReference appender = getCodeElementExtractor().getElementAppenderImpl("Expression"); //$NON-NLS-1$ this.builderFactoryContributions.addContribution(new StringConcatenationClient() { @Override protected void appendTo(TargetStringConcatenation it) { it.append("\t/** Create the appender for a " + getLanguageName() //$NON-NLS-1$ + " XExpression."); //$NON-NLS-1$ it.newLine(); it.append("\t * @param resourceSet the set of the resources that must be used for"); //$NON-NLS-1$ it.newLine(); it.append("\t * containing the generated resource, and resolving types from names."); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the appender."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ 
it.newLine(); it.append("\tpublic "); //$NON-NLS-1$ it.append(appender); it.append(" "); //$NON-NLS-1$ it.append(buildFunctionName); it.append("("); //$NON-NLS-1$ it.append(ResourceSet.class); it.append(" resourceSet) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn new "); //$NON-NLS-1$ it.append(appender); it.append("("); //$NON-NLS-1$ it.append(createFunctionName); it.append("(resourceSet));"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); it.append("\t/** Create the appender for a " + getLanguageName() //$NON-NLS-1$ + " XExpression."); //$NON-NLS-1$ it.newLine(); it.append("\t * @param resource the resource that must be used for"); //$NON-NLS-1$ it.newLine(); it.append("\t * containing the generated resource, and resolving types from names."); //$NON-NLS-1$ it.newLine(); it.append("\t * @return the appender."); //$NON-NLS-1$ it.newLine(); it.append("\t */"); //$NON-NLS-1$ it.newLine(); it.append("\tpublic "); //$NON-NLS-1$ it.append(appender); it.append(" "); //$NON-NLS-1$ it.append(buildFunctionName); it.append("("); //$NON-NLS-1$ it.append(Resource.class); it.append(" resource) {"); //$NON-NLS-1$ it.newLine(); it.append("\t\treturn new "); //$NON-NLS-1$ it.append(appender); it.append("("); //$NON-NLS-1$ it.append(createFunctionName); it.append("(resource));"); //$NON-NLS-1$ it.newLine(); it.append("\t}"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); } }); } } /** Description of the expression context. * * @author $Author: sgalland$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ */ public static class ExpressionContextDescription { private final CodeElementExtractor.ElementDescription member; private final CodeElementExtractor.ElementDescription container; private final String containerKeyword; private final String fieldDeclarationKeyword; private final Assignment expressionAssignment; /** Constructor. 
 *
 * @param container the container of the element that contains the expression.
 * @param member the description of the member that contains the expression.
 * @param containerKeyword the keyword for declaring a type.
 * @param fieldDeclarationKeyword the keyword for declaring a field.
 * @param expressionAssignment the assignment that contains the expression in the element.
 */
public ExpressionContextDescription(
		CodeElementExtractor.ElementDescription container,
		CodeElementExtractor.ElementDescription member,
		String containerKeyword,
		String fieldDeclarationKeyword,
		Assignment expressionAssignment) {
	this.container = container;
	this.member = member;
	this.containerKeyword = containerKeyword;
	this.fieldDeclarationKeyword = fieldDeclarationKeyword;
	this.expressionAssignment = expressionAssignment;
}

/** Replies the container description.
 *
 * @return the container description.
 */
public CodeElementExtractor.ElementDescription getContainerDescription() {
	return this.container;
}

/** Replies the member description.
 *
 * @return the member description.
 */
public CodeElementExtractor.ElementDescription getMemberDescription() {
	return this.member;
}

/** Replies the assignment that contains the expression.
 *
 * @return the grammar assignment.
 */
public Assignment getExpressionAssignment() {
	return this.expressionAssignment;
}

/** Replies the first keyword associated to the container.
 *
 * @return the keyword.
 */
public String getContainerKeyword() {
	return this.containerKeyword;
}

/** Replies the keyword for declaring a field.
 *
 * @return the keyword.
 */
public String getFieldDeclarationKeyword() {
	return this.fieldDeclarationKeyword;
}
}
}
/*
 * Copyright 2002-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.expression.spel.ast;

import java.math.BigDecimal;
import java.math.BigInteger;

import org.springframework.asm.Label;
import org.springframework.asm.MethodVisitor;
import org.springframework.expression.spel.CodeFlow;
import org.springframework.expression.spel.ExpressionState;
import org.springframework.util.ClassUtils;
import org.springframework.util.NumberUtils;
import org.springframework.util.ObjectUtils;

/**
 * Common supertype for operators that operate on either one or two operands.
 * In the case of multiply or divide there would be two operands, but for
 * unary plus or minus, there is only one.
 *
 * @author Andy Clement
 * @author Juergen Hoeller
 * @author Giovanni Dall'Oglio Risso
 * @since 3.0
 */
public abstract class Operator extends SpelNodeImpl {

	// Printable name of the operator (used by toStringAST()).
	private final String operatorName;

	// The descriptors of the runtime operand values are used if the discovered declared
	// descriptors are not providing enough information (for example a generic type
	// whose accessors seem to only be returning 'Object' - the actual descriptors may
	// indicate 'int')
	protected String leftActualDescriptor;

	protected String rightActualDescriptor;

	public Operator(String payload, int pos, SpelNodeImpl... operands) {
		super(pos, operands);
		this.operatorName = payload;
	}

	public SpelNodeImpl getLeftOperand() {
		return this.children[0];
	}

	// NOTE(review): assumes a binary operator; per the class comment a unary
	// operator has a single operand, for which this throws
	// ArrayIndexOutOfBoundsException. Callers must only invoke it on binary nodes.
	public SpelNodeImpl getRightOperand() {
		return this.children[1];
	}

	public final String getOperatorName() {
		return this.operatorName;
	}

	/**
	 * String format for all operators is the same '(' [operand] [operator] [operand] ')'
	 */
	@Override
	public String toStringAST() {
		StringBuilder sb = new StringBuilder("(");
		sb.append(getChild(0).toStringAST());
		for (int i = 1; i < getChildCount(); i++) {
			sb.append(" ").append(getOperatorName()).append(" ");
			sb.append(getChild(i).toStringAST());
		}
		sb.append(")");
		return sb.toString();
	}

	// Replies whether both operands compile to compatible numeric primitives,
	// i.e. whether this operator itself can be compiled to bytecode.
	protected boolean isCompilableOperatorUsingNumerics() {
		SpelNodeImpl left = getLeftOperand();
		SpelNodeImpl right = getRightOperand();
		if (!left.isCompilable() || !right.isCompilable()) {
			return false;
		}
		// Supported operand types for equals (at the moment)
		String leftDesc = left.exitTypeDescriptor;
		String rightDesc = right.exitTypeDescriptor;
		DescriptorComparison dc = DescriptorComparison.checkNumericCompatibility(
				leftDesc, rightDesc, this.leftActualDescriptor, this.rightActualDescriptor);
		return (dc.areNumbers && dc.areCompatible);
	}

	/**
	 * Numeric comparison operators share very similar generated code, only differing in
	 * two comparison instructions.
	 */
	protected void generateComparisonCode(MethodVisitor mv, CodeFlow cf, int compInstruction1, int compInstruction2) {
		String leftDesc = getLeftOperand().exitTypeDescriptor;
		String rightDesc = getRightOperand().exitTypeDescriptor;
		// Boxed operands must be unboxed to primitives before the compare instruction.
		boolean unboxLeft = !CodeFlow.isPrimitive(leftDesc);
		boolean unboxRight = !CodeFlow.isPrimitive(rightDesc);
		DescriptorComparison dc = DescriptorComparison.checkNumericCompatibility(
				leftDesc, rightDesc, this.leftActualDescriptor, this.rightActualDescriptor);
		char targetType = dc.compatibleType; // CodeFlow.toPrimitiveTargetDesc(leftDesc);
		getLeftOperand().generateCode(mv, cf);
		if (unboxLeft) {
			CodeFlow.insertUnboxInsns(mv, targetType, leftDesc);
		}
		cf.enterCompilationScope();
		getRightOperand().generateCode(mv, cf);
		cf.exitCompilationScope();
		if (unboxRight) {
			CodeFlow.insertUnboxInsns(mv, targetType, rightDesc);
		}
		// assert: SpelCompiler.boxingCompatible(leftDesc, rightDesc)
		Label elseTarget = new Label();
		Label endOfIf = new Label();
		// Emit the primitive compare for the common type, then branch:
		// long/double/float need an explicit CMP instruction first; int compares
		// directly with the conditional jump (compInstruction2).
		if (targetType == 'D') {
			mv.visitInsn(DCMPG);
			mv.visitJumpInsn(compInstruction1, elseTarget);
		}
		else if (targetType == 'F') {
			mv.visitInsn(FCMPG);
			mv.visitJumpInsn(compInstruction1, elseTarget);
		}
		else if (targetType == 'J') {
			mv.visitInsn(LCMP);
			mv.visitJumpInsn(compInstruction1, elseTarget);
		}
		else if (targetType == 'I') {
			mv.visitJumpInsn(compInstruction2, elseTarget);
		}
		else {
			throw new IllegalStateException("Unexpected descriptor " + leftDesc);
		}
		// Other numbers are not yet supported (isCompilable will not have returned true)
		mv.visitInsn(ICONST_1);
		mv.visitJumpInsn(GOTO, endOfIf);
		mv.visitLabel(elseTarget);
		mv.visitInsn(ICONST_0);
		mv.visitLabel(endOfIf);
		// The comparison leaves a boolean on the stack.
		cf.pushDescriptor("Z");
	}

	// Equality across the numeric tower: the widest of the two operand types wins
	// (BigDecimal > Double > Float > BigInteger > Long > Integer > Short > Byte).
	protected boolean equalityCheck(ExpressionState state, Object left, Object right) {
		if (left instanceof Number && right instanceof Number) {
			Number leftNumber = (Number) left;
			Number rightNumber = (Number) right;
			if (leftNumber instanceof BigDecimal || rightNumber instanceof BigDecimal) {
				BigDecimal leftBigDecimal = NumberUtils.convertNumberToTargetClass(leftNumber, BigDecimal.class);
				BigDecimal rightBigDecimal = NumberUtils.convertNumberToTargetClass(rightNumber, BigDecimal.class);
				// compareTo (not equals) so that values differing only in scale compare equal.
				return (leftBigDecimal == null ? rightBigDecimal == null :
						leftBigDecimal.compareTo(rightBigDecimal) == 0);
			}
			else if (leftNumber instanceof Double || rightNumber instanceof Double) {
				return (leftNumber.doubleValue() == rightNumber.doubleValue());
			}
			else if (leftNumber instanceof Float || rightNumber instanceof Float) {
				return (leftNumber.floatValue() == rightNumber.floatValue());
			}
			else if (leftNumber instanceof BigInteger || rightNumber instanceof BigInteger) {
				BigInteger leftBigInteger = NumberUtils.convertNumberToTargetClass(leftNumber, BigInteger.class);
				BigInteger rightBigInteger = NumberUtils.convertNumberToTargetClass(rightNumber, BigInteger.class);
				return (leftBigInteger == null ? rightBigInteger == null :
						leftBigInteger.compareTo(rightBigInteger) == 0);
			}
			else if (leftNumber instanceof Long || rightNumber instanceof Long) {
				return (leftNumber.longValue() == rightNumber.longValue());
			}
			else if (leftNumber instanceof Integer || rightNumber instanceof Integer) {
				return (leftNumber.intValue() == rightNumber.intValue());
			}
			else if (leftNumber instanceof Short || rightNumber instanceof Short) {
				return (leftNumber.shortValue() == rightNumber.shortValue());
			}
			else if (leftNumber instanceof Byte || rightNumber instanceof Byte) {
				return (leftNumber.byteValue() == rightNumber.byteValue());
			}
			else {
				// Unknown Number subtypes -> best guess is double comparison
				return (leftNumber.doubleValue() == rightNumber.doubleValue());
			}
		}
		if (left instanceof CharSequence && right instanceof CharSequence) {
			// Compare any CharSequence implementations by their character content.
			return left.toString().equals(right.toString());
		}
		if (ObjectUtils.nullSafeEquals(left, right)) {
			return true;
		}
		if (left instanceof Comparable && right instanceof Comparable) {
			// Fall back to the configured type comparator when both operands
			// share a Comparable common ancestor.
			Class<?> ancestor = ClassUtils.determineCommonAncestor(left.getClass(), right.getClass());
			if (ancestor != null && Comparable.class.isAssignableFrom(ancestor)) {
				return (state.getTypeComparator().compare(left, right) == 0);
			}
		}
		return false;
	}


	/**
	 * A descriptor comparison encapsulates the result of comparing descriptor for two operands and
	 * describes at what level they are compatible.
	 */
	protected static class DescriptorComparison {

		static DescriptorComparison NOT_NUMBERS = new DescriptorComparison(false, false, ' ');

		static DescriptorComparison INCOMPATIBLE_NUMBERS = new DescriptorComparison(true, false, ' ');

		final boolean areNumbers;  // Were the two compared descriptor both for numbers?

		final boolean areCompatible;  // If they were numbers, were they compatible?

		final char compatibleType;  // When compatible, what is the descriptor of the common type

		private DescriptorComparison(boolean areNumbers, boolean areCompatible, char compatibleType) {
			this.areNumbers = areNumbers;
			this.areCompatible = areCompatible;
			this.compatibleType = compatibleType;
		}

		/**
		 * Returns an object that indicates whether the input descriptors are compatible. A declared descriptor
		 * is what could statically be determined (e.g. from looking at the return value of a property accessor
		 * method) whilst an actual descriptor is the type of an actual object that was returned, which may differ.
		 * For generic types with unbound type variables the declared descriptor discovered may be 'Object' but
		 * from the actual descriptor it is possible to observe that the objects are really numeric values (e.g.
		 * ints).
		 * @param leftDeclaredDescriptor the statically determinable left descriptor
		 * @param rightDeclaredDescriptor the statically determinable right descriptor
		 * @param leftActualDescriptor the dynamic/runtime left object descriptor
		 * @param rightActualDescriptor the dynamic/runtime right object descriptor
		 * @return a DescriptorComparison object indicating the type of compatibility, if any
		 */
		public static DescriptorComparison checkNumericCompatibility(String leftDeclaredDescriptor,
				String rightDeclaredDescriptor, String leftActualDescriptor, String rightActualDescriptor) {
			String ld = leftDeclaredDescriptor;
			String rd = rightDeclaredDescriptor;
			boolean leftNumeric = CodeFlow.isPrimitiveOrUnboxableSupportedNumberOrBoolean(ld);
			boolean rightNumeric = CodeFlow.isPrimitiveOrUnboxableSupportedNumberOrBoolean(rd);
			// If the declared descriptors aren't providing the information, try the actual descriptors
			if (!leftNumeric && !ld.equals(leftActualDescriptor)) {
				ld = leftActualDescriptor;
				leftNumeric = CodeFlow.isPrimitiveOrUnboxableSupportedNumberOrBoolean(ld);
			}
			if (!rightNumeric && !rd.equals(rightActualDescriptor)) {
				rd = rightActualDescriptor;
				rightNumeric = CodeFlow.isPrimitiveOrUnboxableSupportedNumberOrBoolean(rd);
			}
			if (leftNumeric && rightNumeric) {
				if (CodeFlow.areBoxingCompatible(ld, rd)) {
					return new DescriptorComparison(true, true, CodeFlow.toPrimitiveTargetDesc(ld));
				}
				else {
					return DescriptorComparison.INCOMPATIBLE_NUMBERS;
				}
			}
			else {
				return DescriptorComparison.NOT_NUMBERS;
			}
		}
	}

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.zeppelin.helium;

import org.apache.zeppelin.interpreter.*;
import org.apache.zeppelin.interpreter.remote.RemoteAngularObjectRegistry;
import org.apache.zeppelin.interpreter.remote.RemoteInterpreterProcess;
import org.apache.zeppelin.interpreter.thrift.RemoteApplicationResult;
import org.apache.zeppelin.interpreter.thrift.RemoteInterpreterService;
import org.apache.zeppelin.notebook.*;
import org.apache.zeppelin.scheduler.ExecutorFactory;
import org.apache.zeppelin.scheduler.Job;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.concurrent.ExecutorService;

/**
 * Manages the lifecycle (load / run / unload) of Helium applications inside
 * remote interpreter processes, and relays application events both into the
 * local {@link ApplicationState} and to an optional downstream
 * {@link ApplicationEventListener}.
 *
 * Load/unload/run work is submitted to a shared executor; the actual state
 * transition of each application is serialized by synchronizing on its
 * {@link ApplicationState} inside the task classes.
 */
public class HeliumApplicationFactory implements ApplicationEventListener, NotebookEventListener {
  private final Logger logger = LoggerFactory.getLogger(HeliumApplicationFactory.class);

  // Shared pool (size 10) for all load/unload tasks of this factory.
  private final ExecutorService executor;
  private Notebook notebook;
  // Optional downstream listener; every on* callback is forwarded to it when set.
  private ApplicationEventListener applicationEventListener;

  public HeliumApplicationFactory() {
    executor = ExecutorFactory.singleton().createOrGet(
        HeliumApplicationFactory.class.getName(), 10);
  }

  // NOTE(review): currently unused in this class — kept as-is; confirm whether
  // callers elsewhere rely on it before removing.
  private boolean isRemote(InterpreterGroup group) {
    return group.getAngularObjectRegistry() instanceof RemoteAngularObjectRegistry;
  }

  /**
   * Load pkg and run task.
   *
   * Fires the onLoad event immediately, then schedules the actual load (and a
   * subsequent run) asynchronously. Returns the application id right away;
   * load failures are reported via status/output on the ApplicationState.
   */
  public String loadAndRun(HeliumPackage pkg, Paragraph paragraph) {
    ApplicationState appState = paragraph.createOrGetApplicationState(pkg);
    onLoad(paragraph.getNote().getId(),
        paragraph.getId(),
        appState.getId(),
        appState.getHeliumPackage());
    executor.submit(new LoadApplication(appState, pkg, paragraph));
    return appState.getId();
  }

  /**
   * Load application and run in the remote process.
   */
  private class LoadApplication implements Runnable {
    private final HeliumPackage pkg;
    private final Paragraph paragraph;
    private final ApplicationState appState;

    public LoadApplication(ApplicationState appState, HeliumPackage pkg, Paragraph paragraph) {
      this.appState = appState;
      this.pkg = pkg;
      this.paragraph = paragraph;
    }

    @Override
    public void run() {
      try {
        // get interpreter process
        Interpreter intp = paragraph.getBindedInterpreter();
        ManagedInterpreterGroup intpGroup = (ManagedInterpreterGroup) intp.getInterpreterGroup();
        RemoteInterpreterProcess intpProcess = intpGroup.getRemoteInterpreterProcess();
        if (intpProcess == null) {
          throw new ApplicationException("Target interpreter process is not running");
        }

        // load application
        load(intpProcess, appState);

        // run application synchronously on this worker thread (not re-submitted).
        RunApplication runTask = new RunApplication(paragraph, appState.getId());
        runTask.run();
      } catch (Exception e) {
        logger.error(e.getMessage(), e);
        if (appState != null) {
          // Surface the failure to the UI: status ERROR + the exception message
          // as application output.
          appStatusChange(paragraph, appState.getId(), ApplicationState.Status.ERROR);
          appState.setOutput(e.getMessage());
        }
      }
    }

    /**
     * Perform the remote load. Synchronized on appState so concurrent loads of
     * the same application are serialized; a second caller sees LOADED and
     * returns immediately.
     */
    private void load(RemoteInterpreterProcess intpProcess, ApplicationState appState)
        throws Exception {
      synchronized (appState) {
        if (appState.getStatus() == ApplicationState.Status.LOADED) {
          // already loaded
          return;
        }
        appStatusChange(paragraph, appState.getId(), ApplicationState.Status.LOADING);
        final String pkgInfo = pkg.toJson();
        final String appId = appState.getId();
        RemoteApplicationResult ret = intpProcess.callRemoteFunction(
            new RemoteInterpreterProcess.RemoteFunction<RemoteApplicationResult>() {
              @Override
              public RemoteApplicationResult call(RemoteInterpreterService.Client client)
                  throws Exception {
                return client.loadApplication(
                    appId,
                    pkgInfo,
                    paragraph.getNote().getId(),
                    paragraph.getId());
              }
            }
        );
        if (ret.isSuccess()) {
          appStatusChange(paragraph, appState.getId(), ApplicationState.Status.LOADED);
        } else {
          throw new ApplicationException(ret.getMsg());
        }
      }
    }
  }

  /**
   * Get ApplicationState.
   * @param paragraph paragraph owning the application
   * @param appId application id
   * @return the state, or null when not present on the paragraph
   */
  public ApplicationState get(Paragraph paragraph, String appId) {
    return paragraph.getApplicationState(appId);
  }

  /**
   * Unload application asynchronously.
   * It does not remove ApplicationState.
   *
   * @param paragraph paragraph owning the application
   * @param appId application id
   */
  public void unload(Paragraph paragraph, String appId) {
    executor.execute(new UnloadApplication(paragraph, appId));
  }

  /**
   * Unload application task.
   */
  private class UnloadApplication implements Runnable {
    private final Paragraph paragraph;
    private final String appId;

    public UnloadApplication(Paragraph paragraph, String appId) {
      this.paragraph = paragraph;
      this.appId = appId;
    }

    @Override
    public void run() {
      ApplicationState appState = null;
      try {
        appState = paragraph.getApplicationState(appId);
        if (appState == null) {
          logger.warn("Can not find {} to unload from {}", appId, paragraph.getId());
          return;
        }
        if (appState.getStatus() == ApplicationState.Status.UNLOADED) {
          // not loaded
          return;
        }
        unload(appState);
      } catch (Exception e) {
        logger.error(e.getMessage(), e);
        if (appState != null) {
          appStatusChange(paragraph, appId, ApplicationState.Status.ERROR);
          appState.setOutput(e.getMessage());
        }
      }
    }

    /**
     * Perform the remote unload under the appState lock. Only a LOADED
     * application may be unloaded; anything else is an error.
     */
    private void unload(final ApplicationState appsToUnload) throws ApplicationException {
      synchronized (appsToUnload) {
        if (appsToUnload.getStatus() != ApplicationState.Status.LOADED) {
          throw new ApplicationException(
              "Can't unload application status " + appsToUnload.getStatus());
        }
        appStatusChange(paragraph, appsToUnload.getId(), ApplicationState.Status.UNLOADING);
        Interpreter intp = null;
        try {
          intp = paragraph.getBindedInterpreter();
        } catch (InterpreterException e) {
          throw new ApplicationException("No interpreter found", e);
        }
        RemoteInterpreterProcess intpProcess =
            ((ManagedInterpreterGroup) intp.getInterpreterGroup()).getRemoteInterpreterProcess();
        if (intpProcess == null) {
          throw new ApplicationException("Target interpreter process is not running");
        }
        RemoteApplicationResult ret = intpProcess.callRemoteFunction(
            new RemoteInterpreterProcess.RemoteFunction<RemoteApplicationResult>() {
              @Override
              public RemoteApplicationResult call(RemoteInterpreterService.Client client)
                  throws Exception {
                return client.unloadApplication(appsToUnload.getId());
              }
            }
        );
        if (ret.isSuccess()) {
          appStatusChange(paragraph, appsToUnload.getId(), ApplicationState.Status.UNLOADED);
        } else {
          throw new ApplicationException(ret.getMsg());
        }
      }
    }
  }

  /**
   * Run application asynchronously.
   * It does not remove ApplicationState.
   *
   * @param paragraph paragraph owning the application
   * @param appId application id
   */
  public void run(Paragraph paragraph, String appId) {
    executor.execute(new RunApplication(paragraph, appId));
  }

  /**
   * Run application task.
   */
  private class RunApplication implements Runnable {
    private final Paragraph paragraph;
    private final String appId;

    public RunApplication(Paragraph paragraph, String appId) {
      this.paragraph = paragraph;
      this.appId = appId;
    }

    @Override
    public void run() {
      ApplicationState appState = null;
      try {
        appState = paragraph.getApplicationState(appId);
        if (appState == null) {
          // NOTE(review): message says "to unload" but this is the run task —
          // looks copy-pasted from UnloadApplication; confirm and fix wording.
          logger.warn("Can not find {} to unload from {}", appId, paragraph.getId());
          return;
        }
        run(appState);
      } catch (Exception e) {
        logger.error(e.getMessage(), e);
        if (appState != null) {
          // NOTE(review): on failure this marks the app UNLOADED while the
          // load/unload tasks use ERROR — possibly unintentional; confirm.
          appStatusChange(paragraph, appId, ApplicationState.Status.UNLOADED);
          appState.setOutput(e.getMessage());
        }
      }
    }

    /**
     * Perform the remote run under the app lock; requires status LOADED.
     */
    private void run(final ApplicationState app) throws ApplicationException {
      synchronized (app) {
        if (app.getStatus() != ApplicationState.Status.LOADED) {
          throw new ApplicationException(
              "Can't run application status " + app.getStatus());
        }
        Interpreter intp = null;
        try {
          intp = paragraph.getBindedInterpreter();
        } catch (InterpreterException e) {
          throw new ApplicationException("No interpreter found", e);
        }
        RemoteInterpreterProcess intpProcess =
            ((ManagedInterpreterGroup) intp.getInterpreterGroup()).getRemoteInterpreterProcess();
        if (intpProcess == null) {
          throw new ApplicationException("Target interpreter process is not running");
        }
        RemoteApplicationResult ret = intpProcess.callRemoteFunction(
            new RemoteInterpreterProcess.RemoteFunction<RemoteApplicationResult>() {
              @Override
              public RemoteApplicationResult call(RemoteInterpreterService.Client client)
                  throws Exception {
                return client.runApplication(app.getId());
              }
            }
        );
        if (ret.isSuccess()) {
          // success — status stays LOADED, nothing to update.
        } else {
          throw new ApplicationException(ret.getMsg());
        }
      }
    }
  }

  /** Append streamed output to the local state and forward the event. */
  @Override
  public void onOutputAppend(
      String noteId, String paragraphId, int index, String appId, String output) {
    ApplicationState appToUpdate = getAppState(noteId, paragraphId, appId);
    if (appToUpdate != null) {
      appToUpdate.appendOutput(output);
    } else {
      logger.error("Can't find app {}", appId);
    }
    if (applicationEventListener != null) {
      applicationEventListener.onOutputAppend(noteId, paragraphId, index, appId, output);
    }
  }

  /** Replace the local output entirely and forward the event. */
  @Override
  public void onOutputUpdated(
      String noteId, String paragraphId, int index, String appId,
      InterpreterResult.Type type, String output) {
    ApplicationState appToUpdate = getAppState(noteId, paragraphId, appId);
    if (appToUpdate != null) {
      appToUpdate.setOutput(output);
    } else {
      logger.error("Can't find app {}", appId);
    }
    if (applicationEventListener != null) {
      applicationEventListener.onOutputUpdated(noteId, paragraphId, index, appId, type, output);
    }
  }

  /** Forward-only: local state is created by loadAndRun, not here. */
  @Override
  public void onLoad(String noteId, String paragraphId, String appId, HeliumPackage pkg) {
    if (applicationEventListener != null) {
      applicationEventListener.onLoad(noteId, paragraphId, appId, pkg);
    }
  }

  /**
   * Apply a textual status to the local state (must match an
   * ApplicationState.Status enum name) and forward the event.
   */
  @Override
  public void onStatusChange(String noteId, String paragraphId, String appId, String status) {
    ApplicationState appToUpdate = getAppState(noteId, paragraphId, appId);
    if (appToUpdate != null) {
      appToUpdate.setStatus(ApplicationState.Status.valueOf(status));
    }
    if (applicationEventListener != null) {
      applicationEventListener.onStatusChange(noteId, paragraphId, appId, status);
    }
  }

  /**
   * Set the status directly on the paragraph's state, then broadcast via
   * onStatusChange. Assumes the app exists on the paragraph (no null check).
   */
  private void appStatusChange(Paragraph paragraph,
                               String appId,
                               ApplicationState.Status status) {
    ApplicationState app = paragraph.getApplicationState(appId);
    app.setStatus(status);
    onStatusChange(paragraph.getNote().getId(), paragraph.getId(), appId, status.toString());
  }

  /**
   * Resolve noteId/paragraphId/appId to an ApplicationState via the notebook.
   * Returns null (with an error log for note/paragraph misses) when any link
   * of the chain is missing.
   */
  private ApplicationState getAppState(String noteId, String paragraphId, String appId) {
    if (notebook == null) {
      return null;
    }
    Note note = notebook.getNote(noteId);
    if (note == null) {
      logger.error("Can't get note {}", noteId);
      return null;
    }
    Paragraph paragraph = note.getParagraph(paragraphId);
    if (paragraph == null) {
      logger.error("Can't get paragraph {}", paragraphId);
      return null;
    }
    ApplicationState appFound = paragraph.getApplicationState(appId);
    return appFound;
  }

  public Notebook getNotebook() {
    return notebook;
  }

  public void setNotebook(Notebook notebook) {
    this.notebook = notebook;
  }

  public ApplicationEventListener getApplicationEventListener() {
    return applicationEventListener;
  }

  public void setApplicationEventListener(ApplicationEventListener applicationEventListener) {
    this.applicationEventListener = applicationEventListener;
  }

  @Override
  public void onNoteRemove(Note note) {
    // no-op: application cleanup happens per-paragraph via onParagraphRemove.
  }

  @Override
  public void onNoteCreate(Note note) {
    // no-op
  }

  /**
   * When an interpreter setting is unbound from a note, unload every
   * application of every paragraph whose bound interpreter belongs to that
   * setting. Note: a paragraph without a bound interpreter aborts the whole
   * loop (return, not continue) — kept as-is.
   */
  @Override
  public void onUnbindInterpreter(Note note, InterpreterSetting setting) {
    for (Paragraph p : note.getParagraphs()) {
      Interpreter currentInterpreter = null;
      try {
        currentInterpreter = p.getBindedInterpreter();
      } catch (InterpreterNotFoundException e) {
        logger.warn("Not interpreter found", e);
        return;
      }
      List<InterpreterInfo> infos = setting.getInterpreterInfos();
      for (InterpreterInfo info : infos) {
        if (currentInterpreter != null &&
            info.getClassName().equals(currentInterpreter.getClassName())) {
          onParagraphRemove(p);
          break;
        }
      }
    }
  }

  /** Synchronously unload all applications of a removed paragraph. */
  @Override
  public void onParagraphRemove(Paragraph paragraph) {
    List<ApplicationState> appStates = paragraph.getAllApplicationStates();
    for (ApplicationState app : appStates) {
      UnloadApplication unloadJob = new UnloadApplication(paragraph, app.getId());
      unloadJob.run();
    }
  }

  @Override
  public void onParagraphCreate(Paragraph p) {
    // no-op
  }

  /** Reload and re-run all applications after a paragraph finishes running. */
  @Override
  public void onParagraphStatusChange(Paragraph p, Job.Status status) {
    if (status == Job.Status.FINISHED) {
      // refresh application
      List<ApplicationState> appStates = p.getAllApplicationStates();
      for (ApplicationState app : appStates) {
        loadAndRun(app.getHeliumPackage(), p);
      }
    }
  }
}
package org.keycloak.services.resources.admin; import org.jboss.resteasy.annotations.cache.NoCache; import org.jboss.resteasy.plugins.providers.multipart.InputPart; import org.jboss.resteasy.plugins.providers.multipart.MultipartFormDataInput; import org.jboss.resteasy.spi.BadRequestException; import org.jboss.resteasy.spi.NotAcceptableException; import org.jboss.resteasy.spi.NotFoundException; import org.keycloak.events.admin.OperationType; import org.keycloak.models.ClientModel; import org.keycloak.models.KeycloakSession; import org.keycloak.models.RealmModel; import org.keycloak.models.utils.KeycloakModelUtils; import org.keycloak.util.CertificateUtils; import org.keycloak.util.PemUtils; import javax.ws.rs.Consumes; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.UriInfo; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.security.KeyStore; import java.security.NoSuchAlgorithmException; import java.security.PrivateKey; import java.security.cert.Certificate; import java.security.cert.X509Certificate; import java.util.List; import java.util.Map; /** * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a> * @version $Revision: 1 $ */ public class ClientAttributeCertificateResource { public static final String PRIVATE_KEY = "private.key"; public static final String X509CERTIFICATE = "certificate"; protected RealmModel realm; private RealmAuth auth; protected ClientModel client; protected KeycloakSession session; protected AdminEventBuilder adminEvent; protected String attributePrefix; protected String privateAttribute; protected String certificateAttribute; public ClientAttributeCertificateResource(RealmModel realm, RealmAuth auth, ClientModel client, KeycloakSession session, String 
attributePrefix, AdminEventBuilder adminEvent) { this.realm = realm; this.auth = auth; this.client = client; this.session = session; this.attributePrefix = attributePrefix; this.privateAttribute = attributePrefix + "." + PRIVATE_KEY; this.certificateAttribute = attributePrefix + "." + X509CERTIFICATE; this.adminEvent = adminEvent; } public static class ClientKeyPairInfo { protected String privateKey; protected String publicKey; protected String certificate; public String getPrivateKey() { return privateKey; } public void setPrivateKey(String privateKey) { this.privateKey = privateKey; } public String getCertificate() { return certificate; } public void setCertificate(String certificate) { this.certificate = certificate; } } /** * * @return */ @GET @NoCache @Produces(MediaType.APPLICATION_JSON) public ClientKeyPairInfo getKeyInfo() { ClientKeyPairInfo info = new ClientKeyPairInfo(); info.setCertificate(client.getAttribute(certificateAttribute)); info.setPrivateKey(client.getAttribute(privateAttribute)); return info; } /** * * @return */ @POST @NoCache @Path("generate") @Produces(MediaType.APPLICATION_JSON) public ClientKeyPairInfo generate() { auth.requireManage(); String subject = client.getClientId(); KeyPair keyPair = null; try { KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA"); generator.initialize(2048); keyPair = generator.generateKeyPair(); } catch (NoSuchAlgorithmException e) { throw new RuntimeException(e); } X509Certificate certificate = null; try { certificate = CertificateUtils.generateV1SelfSignedCertificate(keyPair, subject); } catch (Exception e) { throw new RuntimeException(e); } String privateKeyPem = KeycloakModelUtils.getPemFromKey(keyPair.getPrivate()); String certPem = KeycloakModelUtils.getPemFromCertificate(certificate); client.setAttribute(privateAttribute, privateKeyPem); client.setAttribute(certificateAttribute, certPem); KeycloakModelUtils.generateClientKeyPairCertificate(client); ClientKeyPairInfo info = new 
ClientKeyPairInfo(); info.setCertificate(client.getAttribute(certificateAttribute)); info.setPrivateKey(client.getAttribute(privateAttribute)); adminEvent.operation(OperationType.ACTION).resourcePath(session.getContext().getUri()).representation(info).success(); return info; } /** * * @param uriInfo * @param input * @return * @throws IOException */ @POST @Path("upload") @Consumes(MediaType.MULTIPART_FORM_DATA) @Produces(MediaType.APPLICATION_JSON) public ClientKeyPairInfo uploadJks(@Context final UriInfo uriInfo, MultipartFormDataInput input) throws IOException { auth.requireManage(); ClientKeyPairInfo info = new ClientKeyPairInfo(); Map<String, List<InputPart>> uploadForm = input.getFormDataMap(); List<InputPart> inputParts = uploadForm.get("file"); String keystoreFormat = uploadForm.get("keystoreFormat").get(0).getBodyAsString(); String keyAlias = uploadForm.get("keyAlias").get(0).getBodyAsString(); List<InputPart> keyPasswordPart = uploadForm.get("keyPassword"); char[] keyPassword = keyPasswordPart != null ? keyPasswordPart.get(0).getBodyAsString().toCharArray() : null; List<InputPart> storePasswordPart = uploadForm.get("storePassword"); char[] storePassword = storePasswordPart != null ? 
storePasswordPart.get(0).getBodyAsString().toCharArray() : null; PrivateKey privateKey = null; X509Certificate certificate = null; try { KeyStore keyStore = null; if (keystoreFormat.equals("JKS")) keyStore = KeyStore.getInstance("JKS"); else keyStore = KeyStore.getInstance(keystoreFormat, "BC"); keyStore.load(inputParts.get(0).getBody(InputStream.class, null), storePassword); try { privateKey = (PrivateKey)keyStore.getKey(keyAlias, keyPassword); } catch (Exception e) { // ignore } certificate = (X509Certificate)keyStore.getCertificate(keyAlias); } catch (Exception e) { throw new RuntimeException(e); } if (privateKey != null) { String privateKeyPem = KeycloakModelUtils.getPemFromKey(privateKey); client.setAttribute(privateAttribute, privateKeyPem); info.setPrivateKey(privateKeyPem); } else if (certificate != null) { client.removeAttribute(privateAttribute); } if (certificate != null) { String certPem = KeycloakModelUtils.getPemFromCertificate(certificate); client.setAttribute(certificateAttribute, certPem); info.setCertificate(certPem); } adminEvent.operation(OperationType.ACTION).resourcePath(session.getContext().getUri()).representation(info).success(); return info; } public static class KeyStoreConfig { protected Boolean realmCertificate; protected String storePassword; protected String keyPassword; protected String keyAlias; protected String realmAlias; protected String format; public Boolean isRealmCertificate() { return realmCertificate; } public void setRealmCertificate(Boolean realmCertificate) { this.realmCertificate = realmCertificate; } public String getStorePassword() { return storePassword; } public void setStorePassword(String storePassword) { this.storePassword = storePassword; } public String getKeyPassword() { return keyPassword; } public void setKeyPassword(String keyPassword) { this.keyPassword = keyPassword; } public String getKeyAlias() { return keyAlias; } public void setKeyAlias(String keyAlias) { this.keyAlias = keyAlias; } public String 
getRealmAlias() { return realmAlias; } public void setRealmAlias(String realmAlias) { this.realmAlias = realmAlias; } public String getFormat() { return format; } public void setFormat(String format) { this.format = format; } } /** * * @param config * @return */ @POST @NoCache @Path("/download") @Produces(MediaType.APPLICATION_OCTET_STREAM) @Consumes(MediaType.APPLICATION_JSON) public byte[] getKeystore(final KeyStoreConfig config) { auth.requireView(); if (config.getFormat() != null && !config.getFormat().equals("JKS") && !config.getFormat().equals("PKCS12")) { throw new NotAcceptableException("Only support jks format."); } String format = config.getFormat(); String privatePem = client.getAttribute(privateAttribute); String certPem = client.getAttribute(certificateAttribute); if (privatePem == null && certPem == null) { throw new NotFoundException("keypair not generated for client"); } if (privatePem != null && config.getKeyPassword() == null) { throw new BadRequestException("Need to specify a key password for jks download"); } if (config.getStorePassword() == null) { throw new BadRequestException("Need to specify a store password for jks download"); } final KeyStore keyStore; try { if (format.equals("JKS")) keyStore = KeyStore.getInstance("JKS"); else keyStore = KeyStore.getInstance(format, "BC"); keyStore.load(null, null); String keyAlias = config.getKeyAlias(); if (keyAlias == null) keyAlias = client.getClientId(); if (privatePem != null) { PrivateKey privateKey = PemUtils.decodePrivateKey(privatePem); X509Certificate clientCert = PemUtils.decodeCertificate(certPem); Certificate[] chain = {clientCert}; keyStore.setKeyEntry(keyAlias, privateKey, config.getKeyPassword().trim().toCharArray(), chain); } else { X509Certificate clientCert = PemUtils.decodeCertificate(certPem); keyStore.setCertificateEntry(keyAlias, clientCert); } if (config.isRealmCertificate() == null || config.isRealmCertificate().booleanValue()) { X509Certificate certificate = 
realm.getCertificate(); if (certificate == null) { KeycloakModelUtils.generateRealmCertificate(realm); certificate = realm.getCertificate(); } String certificateAlias = config.getRealmAlias(); if (certificateAlias == null) certificateAlias = realm.getName(); keyStore.setCertificateEntry(certificateAlias, certificate); } ByteArrayOutputStream stream = new ByteArrayOutputStream(); keyStore.store(stream, config.getStorePassword().trim().toCharArray()); stream.flush(); stream.close(); byte[] rtn = stream.toByteArray(); return rtn; } catch (Exception e) { throw new RuntimeException(e); } } }
/**
Copyright 2013 Luciano Zu project Ardulink http://www.ardulink.org/

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@author Luciano Zu
*/
package org.zu.ardulink.connection.proxy;

import static java.lang.Math.max;
import static org.zu.ardulink.connection.proxy.NetworkProxyConnection.DEFAULT_LISTENING_PORT;

import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.kohsuke.args4j.Argument;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.kohsuke.args4j.Option;
import org.kohsuke.args4j.spi.BooleanOptionHandler;
import org.kohsuke.args4j.spi.SubCommand;
import org.kohsuke.args4j.spi.SubCommandHandler;
import org.kohsuke.args4j.spi.SubCommands;
import org.zu.ardulink.Link;
import org.zu.ardulink.connection.pi.RaspberryPIConnection;

/**
 * [ardulinktitle] [ardulinkversion]
 *
 * Command-line entry point for the Ardulink network proxy: the "start"
 * subcommand accepts client connections and hands each to a
 * {@link NetworkProxyServerConnection} thread; "stop" connects to a running
 * server and sends the stop command. Also tracks, per serial port name, how
 * many clients share a {@link Link} so the link is only disconnected when the
 * last user leaves.
 *
 * @author Luciano Zu project Ardulink http://www.ardulink.org/
 *
 * [adsense]
 */
public class NetworkProxyServer implements NetworkProxyMessages {

    /** Common contract for the "start"/"stop" subcommands parsed by args4j. */
    private interface Command {
        void execute(int portNumber);
    }

    /** Accept loop: one handler thread per client until {@code listening} is cleared. */
    public static class StartCommand implements Command {
        @Override
        public void execute(int portNumber) {
            try {
                ServerSocket serverSocket = new ServerSocket(portNumber);
                try {
                    System.out
                            .println("Ardulink Network Proxy Server running...");
                    while (listening) {
                        NetworkProxyServerConnection connection = new NetworkProxyServerConnection(
                                serverSocket.accept());
                        Thread thread = new Thread(connection);
                        thread.start();
                        // NOTE(review): this sleep throttles how fast new
                        // clients can be accepted (one per 2s) — confirm it is
                        // intentional and not leftover debugging.
                        TimeUnit.SECONDS.sleep(2);
                    }
                } finally {
                    serverSocket.close();
                }
            } catch (Exception e) {
                e.printStackTrace();
                System.exit(-1);
            }
            System.out.println("Ardulink Network Proxy Server stops.");
        }
    }

    /** Connects to the local server and requests a shutdown via STOP_SERVER_CMD. */
    public static class StopCommand implements Command {
        @Override
        public void execute(int portNumber) {
            try {
                Socket socket = new Socket("127.0.0.1", portNumber);
                PrintWriter writer = new PrintWriter(socket.getOutputStream(), true);
                writer.println(STOP_SERVER_CMD);
                writer.close();
                socket.close();
                System.out
                        .println("Ardulink Network Proxy Server stop requested.");
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    // Accept-loop flag; cleared by stop(). NOTE(review): not volatile, so the
    // accept thread may not see the update promptly — in practice the blocking
    // accept() dominates; confirm before relying on prompt shutdown.
    private static boolean listening = true;

    @Argument(required = true, usage = "command", handler = SubCommandHandler.class)
    @SubCommands({ @SubCommand(name = "start", impl = StartCommand.class),
            @SubCommand(name = "stop", impl = StopCommand.class) })
    private Command command;

    @Option(name = "-p", aliases = "--port", usage = "Local port to bind to")
    private int portNumber = DEFAULT_LISTENING_PORT;

    @Option(name = "-rasp", aliases = "--raspberryGPIO", handler = BooleanOptionHandler.class, usage = "Link used is for Raspberry PI GPIO")
    private static boolean raspGPIOConnection;

    // Reference count of connected clients per port name; guarded by its own monitor.
    private static Map<String, Integer> linkUsers = new HashMap<String, Integer>();

    public static void main(String[] args) {
        new NetworkProxyServer().doMain(args);
    }

    /** Parse args4j options/subcommand, then dispatch to it. */
    private void doMain(String[] args) {
        CmdLineParser cmdLineParser = new CmdLineParser(this);
        try {
            cmdLineParser.parseArgument(args);
        } catch (CmdLineException e) {
            System.err.println(e.getMessage());
            cmdLineParser.printUsage(System.err);
            return;
        }
        command.execute(portNumber);
    }

    /** Signal the accept loop to stop after the next accepted connection. */
    public static void stop() {
        listening = false;
    }

    /**
     * Obtain (creating if needed) the Link for portName, connect it if not yet
     * connected, and count the caller as a user of that link.
     */
    public static Link connect(String portName, int baudRate) {
        Link link = Link.getInstance(portName);
        if (link == null) {
            link = retrieveInstance(portName);
        }
        if (!link.isConnected()) {
            link.connect(portName, baudRate);
        }
        addUserToLink(portName);
        return link;
    }

    /**
     * Drop one user from the link; physically disconnect and destroy it only
     * when it is not the default instance and the last user just left.
     * Returns whether a real disconnect happened.
     */
    public static boolean disconnect(String portName) {
        boolean retvalue = false;
        if (!Link.getDefaultInstance().getName().equals(portName)) {
            Link link = Link.getInstance(portName);
            if (link != null) {
                int currentUsers = removeUserFromLink(portName);
                if (currentUsers == 0) {
                    retvalue = link.disconnect();
                    Link.destroyInstance(portName);
                }
            } else {
                // Link already gone: still decrement the bookkeeping entry.
                removeUserFromLink(portName);
            }
        }
        return retvalue;
    }

    /** Increment the user count for portName; returns the new count. */
    private static int addUserToLink(String portName) {
        synchronized (linkUsers) {
            Integer users = linkUsers.get(portName);
            int retvalue = users == null ? 1 : users + 1;
            linkUsers.put(portName, retvalue);
            return retvalue;
        }
    }

    /** Decrement the user count for portName (never below 0); returns the new count. */
    private static int removeUserFromLink(String portName) {
        synchronized (linkUsers) {
            Integer users = linkUsers.get(portName);
            int retvalue = users == null ? 0 : max(0, users - 1);
            linkUsers.put(portName, retvalue);
            return retvalue;
        }
    }

    /** List available ports, from the Raspberry GPIO link when -rasp was given. */
    public static List<String> getPortList() {
        List<String> retvalue = null;
        if (raspGPIOConnection) {
            retvalue = retrieveInstance(RaspberryPIConnection.CONNECTION_NAME).getPortList();
        } else {
            retvalue = Link.getDefaultInstance().getPortList();
        }
        return retvalue;
    }

    /** Create a Link for portName, backed by Raspberry GPIO when -rasp was given. */
    private static Link retrieveInstance(String portName) {
        Link retvalue;
        if (raspGPIOConnection) {
            retvalue = Link.createInstance(portName, new RaspberryPIConnection());
        } else {
            retvalue = Link.createInstance(portName);
        }
        return retvalue;
    }
}
/*
 * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.siddhi.core.query.selector;

import org.apache.log4j.Logger;
import org.wso2.siddhi.core.config.ExecutionPlanContext;
import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.state.populater.StateEventPopulator;
import org.wso2.siddhi.core.event.stream.StreamEvent;
import org.wso2.siddhi.core.exception.ExecutionPlanCreationException;
import org.wso2.siddhi.core.executor.condition.ConditionExpressionExecutor;
import org.wso2.siddhi.core.query.output.ratelimit.OutputRateLimiter;
import org.wso2.siddhi.core.query.processor.Processor;
import org.wso2.siddhi.core.query.selector.attribute.processor.AttributeProcessor;
import org.wso2.siddhi.query.api.execution.query.selection.Selector;

import java.util.*;

/**
 * Terminal processor of a Siddhi query: applies the SELECT clause's attribute
 * processors (projection/aggregation), optional GROUP BY keying via a
 * ThreadLocal, and the HAVING condition, then hands surviving events to the
 * {@link OutputRateLimiter}. It is always the last Processor in the chain.
 */
public class QuerySelector implements Processor {

    private static final Logger log = Logger.getLogger(QuerySelector.class);
    // Exposes the current group-by key to aggregators running on this thread
    // while an event is being processed.
    private static final ThreadLocal<String> keyThreadLocal = new ThreadLocal<String>();
    private Selector selector;
    private ExecutionPlanContext executionPlanContext;
    private boolean currentOn = false;   // emit CURRENT events downstream
    private boolean expiredOn = false;   // emit EXPIRED events downstream
    private boolean containsAggregator = false;
    private OutputRateLimiter outputRateLimiter;
    private List<AttributeProcessor> attributeProcessorList;
    private ConditionExpressionExecutor havingConditionExecutor = null;
    private boolean isGroupBy = false;
    private GroupByKeyGenerator groupByKeyGenerator;
    private String id;
    private StateEventPopulator eventPopulator;

    public QuerySelector(String id, Selector selector, boolean currentOn, boolean expiredOn,
                         ExecutionPlanContext executionPlanContext) {
        this.id = id;
        this.currentOn = currentOn;
        this.expiredOn = expiredOn;
        this.selector = selector;
        this.executionPlanContext = executionPlanContext;
    }

    /** Group-by key of the event currently being processed on this thread (or null). */
    public static String getThreadLocalGroupByKey() {
        return keyThreadLocal.get();
    }

    /**
     * Process a chunk of events. Without aggregators every passing event is
     * added individually; with aggregators the chunk is processed in batch
     * mode (see processInBatches). Events are consumed (removed) from the
     * chunk as they are handled.
     */
    @Override
    public void process(ComplexEventChunk complexEventChunk) {
        if (log.isTraceEnabled()) {
            log.trace("event is processed by selector " + id + this);
        }
        if (!containsAggregator) {
            boolean eventSent = false;
            complexEventChunk.reset();
            while (complexEventChunk.hasNext()) {       //todo optimize
                ComplexEvent event = complexEventChunk.next();
                if (event.getType() == StreamEvent.Type.CURRENT
                        || event.getType() == StreamEvent.Type.EXPIRED) {
                    eventPopulator.populateStateEvent(event);
                    if (isGroupBy) {
                        // Publish the key before attribute processors run so
                        // per-group state is resolved correctly.
                        keyThreadLocal.set(groupByKeyGenerator.constructEventKey(event));
                    }
                    //TODO: have to change for windows
                    for (AttributeProcessor attributeProcessor : attributeProcessorList) {
                        attributeProcessor.process(event);
                    }
                    complexEventChunk.remove();
                    if ((event.getType() == StreamEvent.Type.CURRENT && currentOn)
                            || (event.getType() == StreamEvent.Type.EXPIRED && expiredOn)) {
                        // Forward unless a HAVING condition exists and rejects the event.
                        if (!(havingConditionExecutor != null
                                && !havingConditionExecutor.execute(event))) {
                            outputRateLimiter.add(event);
                            eventSent = true;
                        }
                    }
                    if (isGroupBy) {
                        keyThreadLocal.remove();
                    }
                }
            }
            if (eventSent) {
                // The (now drained) chunk is cleared and used to flush the limiter.
                complexEventChunk.clear();
                outputRateLimiter.process(complexEventChunk);
            }
        } else {
            processInBatches(complexEventChunk);
        }
    }

    /**
     * Batch mode (aggregators present): the whole chunk is folded first, then
     * only the last event per group (or the overall last event when not
     * grouping) is forwarded, reflecting the final aggregate values.
     */
    public void processInBatches(ComplexEventChunk complexEventChunk) {
        // LinkedHashMap keeps first-seen group order for the emitted events.
        Map<String, ComplexEvent> groupedEvents = new LinkedHashMap<String, ComplexEvent>();
        boolean eventSent = false;
        complexEventChunk.reset();
        ComplexEvent lastEvent = null;
        while (complexEventChunk.hasNext()) {
            ComplexEvent event = complexEventChunk.next();
            if (event.getType() == StreamEvent.Type.CURRENT
                    || event.getType() == StreamEvent.Type.EXPIRED) {
                eventPopulator.populateStateEvent(event);
                String groupByKey = "";
                if (isGroupBy) {
                    groupByKey = groupByKeyGenerator.constructEventKey(event);
                    keyThreadLocal.set(groupByKey);
                }
                for (AttributeProcessor attributeProcessor : attributeProcessorList) {
                    attributeProcessor.process(event);
                }
                complexEventChunk.remove();
                if ((event.getType() == StreamEvent.Type.CURRENT && currentOn)
                        || (event.getType() == StreamEvent.Type.EXPIRED && expiredOn)) {
                    if (!(havingConditionExecutor != null
                            && !havingConditionExecutor.execute(event))) {
                        if (isGroupBy) {
                            // Later events for the same key replace earlier ones.
                            groupedEvents.put(groupByKey, event);
                        } else {
                            lastEvent = event;
                        }
                        eventSent = true;
                    }
                }
                if (isGroupBy) {
                    keyThreadLocal.remove();
                }
            }
        }
        if (eventSent) {
            if (isGroupBy) {
                for (ComplexEvent complexEvent : groupedEvents.values()) {
                    outputRateLimiter.add(complexEvent);
                }
            } else {
                outputRateLimiter.add(lastEvent);
            }
            complexEventChunk.clear();
            outputRateLimiter.process(complexEventChunk);
        }
    }

    // NOTE(review): currently unreferenced within this class — HAVING is
    // evaluated inline in process()/processInBatches(); confirm whether this
    // helper is still needed.
    private void evaluateHavingConditions(ComplexEventChunk<StreamEvent> streamEventBuffer) {
        while (streamEventBuffer.hasNext()) {
            StreamEvent streamEvent = streamEventBuffer.next();
            if (!havingConditionExecutor.execute(streamEvent)) {
                streamEventBuffer.remove();
                // eventManager.clear(event); todo use this after fixing join cases
            }
        }
    }

    @Override
    public Processor getNextProcessor() {
        return null;    //since there is no processors after a query selector
    }

    @Override
    public void setNextProcessor(Processor processor) {
        //this method will not be used as there is no processors after a query selector
    }

    /** Attach the output rate limiter; may only be assigned once. */
    public void setNextProcessor(OutputRateLimiter outputRateLimiter) {
        if (this.outputRateLimiter == null) {
            this.outputRateLimiter = outputRateLimiter;
        } else {
            throw new ExecutionPlanCreationException("outputRateLimiter is already assigned");
        }
    }

    @Override
    public void setToLast(Processor processor) {
        if (getNextProcessor() == null) {
            this.setNextProcessor(processor);
        } else {
            getNextProcessor().setToLast(processor);
        }
    }

    @Override
    public Processor cloneProcessor(String key) {
        // Cloning as a generic Processor is unsupported; use clone(String) instead.
        return null;
    }

    public List<AttributeProcessor> getAttributeProcessorList() {
        return attributeProcessorList;
    }

    /** Set projection/aggregation processors; containsAggregator selects batch mode. */
    public void setAttributeProcessorList(List<AttributeProcessor> attributeProcessorList,
                                          boolean containsAggregator) {
        this.attributeProcessorList = attributeProcessorList;
        this.containsAggregator = containsAggregator;
    }

    /** Enables GROUP BY processing with the given key generator. */
    public void setGroupByKeyGenerator(GroupByKeyGenerator groupByKeyGenerator) {
        isGroupBy = true;
        this.groupByKeyGenerator = groupByKeyGenerator;
    }

    public void setHavingConditionExecutor(ConditionExpressionExecutor havingConditionExecutor) {
        this.havingConditionExecutor = havingConditionExecutor;
    }

    /**
     * Per-partition clone: attribute processors are cloned with the key;
     * generator/having/populator references are shared with the original.
     */
    public QuerySelector clone(String key) {
        QuerySelector clonedQuerySelector =
                new QuerySelector(id + key, selector, currentOn, expiredOn, executionPlanContext);
        List<AttributeProcessor> clonedAttributeProcessorList = new ArrayList<AttributeProcessor>();
        for (AttributeProcessor attributeProcessor : attributeProcessorList) {
            clonedAttributeProcessorList.add(attributeProcessor.cloneProcessor(key));
        }
        clonedQuerySelector.attributeProcessorList = clonedAttributeProcessorList;
        clonedQuerySelector.isGroupBy = isGroupBy;
        clonedQuerySelector.containsAggregator = containsAggregator;
        clonedQuerySelector.groupByKeyGenerator = groupByKeyGenerator;
        clonedQuerySelector.havingConditionExecutor = havingConditionExecutor;
        clonedQuerySelector.eventPopulator = eventPopulator;
        return clonedQuerySelector;
    }

    public void setEventPopulator(StateEventPopulator eventPopulator) {
        this.eventPopulator = eventPopulator;
    }
}
/** * Copyright 2014 Microsoft Open Technologies, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.microsoftopentechnologies.intellij.wizards; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.net.URI; import java.util.*; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import com.microsoftopentechnologies.azurecommons.deploy.model.CertificateUpload; import com.microsoftopentechnologies.azurecommons.deploy.model.CertificateUploadList; import com.microsoftopentechnologies.azurecommons.deploy.model.DeployDescriptor; import com.microsoftopentechnologies.azurecommons.deploy.model.RemoteDesktopDescriptor; import com.microsoftopentechnologies.azurecommons.deploy.tasks.*; import com.microsoftopentechnologies.azurecommons.deploy.util.PublishData; import com.microsoftopentechnologies.azurecommons.deploy.wizard.ConfigurationEventArgs; import com.microsoftopentechnologies.azurecommons.deploy.wizard.ConfigurationEventListener; import com.microsoftopentechnologies.azurecommons.deploy.wizard.WizardCacheManagerUtilMethods; import com.microsoftopentechnologies.azurecommons.exception.RestAPIException; import com.microsoftopentechnologies.azurecommons.wacommonutil.FileUtil; import 
com.microsoftopentechnologies.azurecommons.wacommonutil.PreferenceSetUtil; import com.microsoftopentechnologies.azuremanagementutil.model.KeyName; import com.microsoftopentechnologies.azuremanagementutil.model.StorageService; import com.microsoftopentechnologies.azuremanagementutil.model.StorageServices; import com.microsoftopentechnologies.azuremanagementutil.model.Subscription; import com.microsoftopentechnologies.azuremanagementutil.rest.WindowsAzureServiceManagement; import com.microsoftopentechnologies.azuremanagementutil.rest.WindowsAzureStorageServices; import com.microsoftopentechnologies.intellij.rest.*; import com.microsoft.windowsazure.Configuration; import com.microsoft.windowsazure.management.compute.models.HostedServiceCreateParameters; import com.microsoft.windowsazure.management.compute.models.HostedServiceGetDetailedResponse; import com.microsoft.windowsazure.management.models.LocationsListResponse.Location; import com.microsoft.windowsazure.management.storage.models.StorageAccountCreateParameters; import com.microsoft.windowsazure.management.compute.models.ServiceCertificateListResponse.Certificate; import com.microsoft.windowsazure.management.compute.models.HostedServiceListResponse.HostedService; import com.interopbridges.tools.windowsazure.WindowsAzurePackageType; import com.microsoftopentechnologies.intellij.AzurePlugin; import com.microsoftopentechnologies.intellij.ui.components.WindowsAzurePage; import static com.microsoftopentechnologies.intellij.AzurePlugin.log; import static com.microsoftopentechnologies.intellij.ui.messages.AzureBundle.message; public final class WizardCacheManager { private static final WizardCacheManager INSTANCE = new WizardCacheManager(); private static final List<PublishData> PUBLISHS = new ArrayList<PublishData>(); private static PublishData currentPublishData; private static KeyName currentAccessKey; private static String currentStorageService; private static String currentHostedService; private static String 
deployFile; private static String deployConfigFile; private static String deployState; private static WindowsAzurePackageType deployMode; private static String unpublish; private static RemoteDesktopDescriptor remoteDesktopDescriptor; private static CertificateUploadList certList; private static boolean displayHttpsLink = false; private static Map<String, String> publishSettingsPerSubscriptionMap = new HashMap<String, String>(); public static WizardCacheManager getInstrance() { return INSTANCE; } public static List<CertificateUpload> getList() { return certList.getList(); } private WizardCacheManager() { WindowsAzurePage.addConfigurationEventListener(new ConfigurationEventListener() { @Override public void onConfigurationChanged(ConfigurationEventArgs config) { try { notifyConfiguration(config); } catch (RestAPIException e) { log(message("error"), e); } } }); } public static DeployDescriptor collectConfiguration() { DeployDescriptor deployDescriptor = new DeployDescriptor(deployMode, currentPublishData.getCurrentSubscription().getId(), getCurrentStorageAcount(), currentAccessKey, getCurentHostedService(), deployFile, deployConfigFile, deployState, remoteDesktopDescriptor, checkSchemaVersionAndReturnUrl(), unpublish, certList, displayHttpsLink, currentPublishData.getCurrentConfiguration()); remoteDesktopDescriptor = null; return deployDescriptor; } public static WindowsAzureStorageServices createStorageServiceHelper() { return WizardCacheManagerUtilMethods.createStorageServiceHelper(currentPublishData, currentStorageService, currentAccessKey); } public static WindowsAzureServiceManagement createServiceManagementHelper() { return WizardCacheManagerUtilMethods.createServiceManagementHelper(currentPublishData); } public static List<Location> getLocation() { return WizardCacheManagerUtilMethods.getLocation(currentPublishData); } public static String getCurrentDeplyFile() { return deployFile; } public static boolean getDisplayHttpsLink() { return displayHttpsLink; } 
public static String getCurrentDeployConfigFile() {
    return deployConfigFile;
}

// NOTE(review): "Deply" typo is part of the public API; callers depend on it.
public static String getCurrentDeplyState() {
    return deployState;
}

public static String getUnpublish() {
    return unpublish;
}

public static RemoteDesktopDescriptor getCurrentRemoteDesktopDescriptor() {
    return remoteDesktopDescriptor;
}

public static PublishData getCurrentPublishData() {
    return currentPublishData;
}

/** @return the live cache of all loaded publish-settings entries (not a defensive copy). */
public static Collection<PublishData> getPublishDatas() {
    return PUBLISHS;
}

public static Subscription findSubscriptionByName(String subscriptionName) {
    return WizardCacheManagerUtilMethods.findSubscriptionByName(subscriptionName, PUBLISHS);
}

public static PublishData findPublishDataBySubscriptionId(String subscriptionId) {
    return WizardCacheManagerUtilMethods.findPublishDataBySubscriptionId(subscriptionId, PUBLISHS);
}

public static String findSubscriptionNameBySubscriptionId(String subscriptionId) {
    return WizardCacheManagerUtilMethods.findSubscriptionNameBySubscriptionId(subscriptionId, PUBLISHS);
}

/**
 * Removes the given subscription from its cached PublishData entry; when that
 * entry has no subscriptions left it is evicted from the cache and the
 * current publish data is cleared. No-op for null/unknown ids.
 */
public static void removeSubscription(String subscriptionId) {
    if (subscriptionId == null) {
        return;
    }
    PublishData publishData = findPublishDataBySubscriptionId(subscriptionId);
    if (publishData == null) {
        return;
    }
    List<Subscription> subs = publishData.getPublishProfile().getSubscriptions();
    int index = WizardCacheManagerUtilMethods.getIndexOfPublishData(subscriptionId, PUBLISHS);
    for (int i = 0; i < subs.size(); i++) {
        Subscription s = subs.get(i);
        if (s.getSubscriptionID().equals(subscriptionId)) {
            publishData.getPublishProfile().getSubscriptions().remove(i);
            PUBLISHS.set(index, publishData);
            if (publishData.getPublishProfile().getSubscriptions().size() == 0) {
                PUBLISHS.remove(publishData);
                /*
                 * If all subscriptions are removed
                 * set current subscription to null.
                 */
                setCurrentPublishData(null);
            }
            break;
        }
    }
}

public static void changeCurrentSubscription(PublishData publishData, String subscriptionId) {
    WizardCacheManagerUtilMethods.changeCurrentSubscription(publishData, subscriptionId);
}

// NOTE(review): "Acount" typo is part of the public API; callers depend on it.
public static StorageService getCurrentStorageAcount() {
    return WizardCacheManagerUtilMethods.getCurrentStorageAcount(currentPublishData, currentStorageService);
}

// NOTE(review): "Curent" typo is part of the public API; callers depend on it.
public static HostedService getCurentHostedService() {
    return WizardCacheManagerUtilMethods.getCurentHostedService(currentPublishData, currentHostedService);
}

public static HostedService getHostedServiceFromCurrentPublishData(final String hostedServiceName) {
    return WizardCacheManagerUtilMethods.getHostedServiceFromCurrentPublishData(hostedServiceName, currentPublishData);
}

/**
 * Method uses REST API and returns already uploaded certificates
 * from currently selected cloud service on wizard.
 * @return certificates of the cloud service currently selected on the wizard
 */
public static List<Certificate> fetchUploadedCertificates() {
    return WizardCacheManagerUtilMethods.fetchUploadedCertificates(currentPublishData, currentHostedService);
}

/**
 * Creates the cloud service remotely and records it in the per-subscription
 * cache of the current publish data.
 */
public static HostedService createHostedService(HostedServiceCreateParameters createHostedService) throws Exception {
    HostedService hostedService = WizardCacheManagerUtilMethods.createHostedService(createHostedService, currentPublishData);
    currentPublishData.getServicesPerSubscription().get(currentPublishData.getCurrentSubscription().getId()).add(hostedService);
    return hostedService;
}

/**
 * Creates the storage account remotely, replacing any mock entry previously
 * added by {@code createStorageServiceMock} under the same name.
 */
public static StorageService createStorageAccount(StorageAccountCreateParameters accountParameters) throws Exception {
    Subscription subscription = currentPublishData.getCurrentSubscription();
    StorageService storageAccount = WizardCacheManagerUtilMethods.createStorageAccount(accountParameters, currentPublishData);
    // remove previous mock if existed
    currentPublishData.getStoragesPerSubscription().get(subscription.getId()).remove(accountParameters.getName());
    currentPublishData.getStoragesPerSubscription().get(subscription.getId()).add(storageAccount);
    return storageAccount;
}

public static boolean isHostedServiceNameAvailable(final String hostedServiceName) throws Exception {
    return WizardCacheManagerUtilMethods.isHostedServiceNameAvailable(hostedServiceName, currentPublishData);
}

public static boolean isStorageAccountNameAvailable(final String storageAccountName) throws Exception {
    return WizardCacheManagerUtilMethods.isStorageAccountNameAvailable(storageAccountName, currentPublishData);
}

/** Adds a local placeholder storage entry (no remote call) to the current subscription's cache. */
public static StorageService createStorageServiceMock(String storageAccountNameToCreate, String storageAccountLocation, String description) {
    StorageService storageService = WizardCacheManagerUtilMethods.createStorageServiceMock(storageAccountNameToCreate, storageAccountLocation, description);
    currentPublishData.getStoragesPerSubscription().get(currentPublishData.getCurrentSubscription().getId()).add(storageService);
    return storageService;
}

/** Adds a local placeholder cloud-service entry (no remote call) to the current subscription's cache. */
public static HostedService createHostedServiceMock(String hostedServiceNameToCreate, String hostedServiceLocation, String description) {
    Subscription subscription = currentPublishData.getCurrentSubscription();
    HostedService hostedService = WizardCacheManagerUtilMethods.createHostedServiceMock(hostedServiceNameToCreate, hostedServiceLocation, description);
    currentPublishData.getServicesPerSubscription().get(subscription.getId()).add(hostedService);
    return hostedService;
}

public static List<HostedService> getHostedServices() {
    return WizardCacheManagerUtilMethods.getHostedServices(currentPublishData);
}

/**
 * Dispatches a wizard configuration change into the matching static field.
 * Invoked via the ConfigurationEventListener registered in the constructor;
 * each branch keys off a ConfigurationEventArgs constant.
 */
private void notifyConfiguration(ConfigurationEventArgs config) throws RestAPIException {
    if (ConfigurationEventArgs.DEPLOY_FILE.equals(config.getKey())) {
        deployFile = config.getValue().toString();
    } else if (ConfigurationEventArgs.DEPLOY_CONFIG_FILE.equals(config.getKey())) {
        deployConfigFile = config.getValue().toString();
    } else if (ConfigurationEventArgs.DEPLOY_STATE.equals(config.getKey())) {
        deployState = config.getValue().toString();
    } else if (ConfigurationEventArgs.SUBSCRIPTION.equals(config.getKey())) {
        PublishData publishData = (PublishData) config.getValue();
        // Guard against concurrent initialization of the same account.
        if (publishData.isInitialized() == false && publishData.isInitializing().compareAndSet(false, true)) {
            // CacheAccountWithProgressWindow settings = new CacheAccountWithProgressWindow(null, publishData, Display.getDefault().getActiveShell(), null);
            // Display.getDefault().syncExec(settings);
        }
    } else if (ConfigurationEventArgs.HOSTED_SERVICE.equals(config.getKey())) {
        HostedService hostedService = (HostedService) config.getValue();
        if (hostedService != null)
            currentHostedService = hostedService.getServiceName();
    } else if (ConfigurationEventArgs.STORAGE_ACCOUNT.equals(config.getKey())) {
        StorageService storageService = (StorageService) config.getValue();
        if (storageService != null) {
            currentStorageService = storageService.getServiceName();
        }
    } else if (ConfigurationEventArgs.REMOTE_DESKTOP.equals(config.getKey())) {
        remoteDesktopDescriptor = (RemoteDesktopDescriptor) config.getValue();
    } else if (ConfigurationEventArgs.CERTIFICATES.equals(config.getKey())) {
        certList = (CertificateUploadList) config.getValue();
    } else if (ConfigurationEventArgs.DEPLOY_MODE.equals(config.getKey())) {
        deployMode = (WindowsAzurePackageType) config.getValue();
    } else if (ConfigurationEventArgs.UN_PUBLISH.equals(config.getKey())) {
        unpublish = config.getValue().toString();
    } else if (ConfigurationEventArgs.STORAGE_ACCESS_KEY.equals(config.getKey())) {
        String value = config.getValue().toString();
        // Empty value falls back to the primary access key.
        if (value != null && !value.isEmpty()) {
            currentAccessKey = KeyName.valueOf(value);
        } else {
            currentAccessKey = KeyName.Primary;
        }
    } else if (ConfigurationEventArgs.CONFIG_HTTPS_LINK.equals(config.getKey())) {
        String value = config.getValue().toString();
        if (value != null && !value.isEmpty()) {
            displayHttpsLink = Boolean.parseBoolean(value.trim());
        }
    }
}
public static HostedServiceGetDetailedResponse getHostedServiceWithDeployments(String hostedService) throws Exception {
    return WizardCacheManagerUtilMethods.getHostedServiceWithDeployments(hostedService, currentPublishData);
}

public static void setCurrentPublishData(PublishData currentSubscription2) {
    currentPublishData = currentSubscription2;
}

/**
 * Loads the given publish settings, builds a Configuration per subscription,
 * asynchronously loads subscriptions/cloud services/locations/storage
 * accounts, and on success registers the PublishData in the cache and makes
 * it current.
 *
 * Fixes applied: both thread pools are now shut down (they were leaked),
 * ExecutionException/TimeoutException are logged instead of silently
 * swallowed, printStackTrace was replaced with the plugin logger, and the
 * interrupt status is restored on InterruptedException.
 *
 * @param publishSettingsFile file to import; null means reuse an existing
 *                            configuration via subscription id
 * @param publishData         parsed publish profile to cache; null is a no-op
 * @param listener            optional progress listener for the loading tasks
 */
public static void cachePublishData(File publishSettingsFile, PublishData publishData, LoadingAccoutListener listener)
        throws RestAPIException, IOException {
    boolean canceled = false;
    List<Subscription> subscriptions = null;
    final int operationsTimeoutSeconds = 60 * 5;
    if (publishData == null) {
        return;
    } else {
        subscriptions = publishData.getPublishProfile().getSubscriptions();
    }
    if (subscriptions == null) {
        return;
    }
    String schemaVer = publishData.getPublishProfile().getSchemaVersion();
    boolean isNewSchema = schemaVer != null && !schemaVer.isEmpty() && schemaVer.equalsIgnoreCase("2.0");
    // URL if schema version is 1.0
    String url = publishData.getPublishProfile().getUrl();
    Map<String, Configuration> configurationPerSubscription = new HashMap<String, Configuration>();
    for (Subscription subscription : subscriptions) {
        if (isNewSchema) {
            // publishsetting file is of schema version 2.0
            url = subscription.getServiceManagementUrl();
        }
        if (url == null || url.isEmpty()) {
            // Fall back to the management URL from the preference set.
            try {
                url = PreferenceSetUtil.getManagementURL(PreferenceSetUtil.getSelectedPreferenceSetName(AzurePlugin.prefFilePath), AzurePlugin.prefFilePath);
                url = url.substring(0, url.lastIndexOf("/"));
            } catch (Exception e) {
                log(e.getMessage());
            }
        }
        Configuration configuration = (publishSettingsFile == null)
                ? WindowsAzureRestUtils.loadConfiguration(subscription.getId(), url)
                : WindowsAzureRestUtils.getConfiguration(publishSettingsFile, subscription.getId());
        configurationPerSubscription.put(subscription.getId(), configuration);
        if (publishSettingsFile != null) {
            // copy file to user home and remember its location per subscription
            String outFile = System.getProperty("user.home") + File.separator + ".azure"
                    + File.separator + publishSettingsFile.getName();
            try {
                // copy file to user home
                FileUtil.writeFile(new FileInputStream(publishSettingsFile), new FileOutputStream(outFile));
                // put an entry into global cache
                publishSettingsPerSubscriptionMap.put(subscription.getId(), outFile);
            } catch (IOException e) {
                // Best-effort copy: failure only loses the cached path, so log and continue.
                log(e.getMessage(), e);
            }
        }
    }
    publishData.setConfigurationPerSubscription(configurationPerSubscription);
    if (!publishData.isInitialized() && publishData.isInitializing().compareAndSet(false, true)) {
        List<Future<?>> loadServicesFutures = null;
        Future<?> loadSubscriptionsFuture = null;
        // Hoisted out of the try so the finally block can shut them down
        // (previously both pools were leaked).
        ScheduledExecutorService subscriptionThreadPool = null;
        ScheduledExecutorService threadPool = null;
        try {
            List<Subscription> subBackup = publishData.getPublishProfile().getSubscriptions();
            // thread pool size is number of subscriptions
            subscriptionThreadPool = Executors.newScheduledThreadPool(subscriptions.size());
            LoadingSubscriptionTask loadingSubscriptionTask = new LoadingSubscriptionTask(publishData);
            loadingSubscriptionTask.setSubscriptionIds(subscriptions);
            if (listener != null) {
                loadingSubscriptionTask.addLoadingAccountListener(listener);
            }
            loadSubscriptionsFuture = subscriptionThreadPool.submit(new LoadingTaskRunner(loadingSubscriptionTask));
            loadSubscriptionsFuture.get(operationsTimeoutSeconds, TimeUnit.SECONDS);
            /*
             * add explicitly management URL and certificate which was removed
             * Changes are did to support both publish setting schema versions.
             */
            if (isNewSchema) {
                for (int i = 0; i < subBackup.size(); i++) {
                    publishData.getPublishProfile().getSubscriptions().get(i)
                            .setServiceManagementUrl(subBackup.get(i).getServiceManagementUrl());
                    publishData.getPublishProfile().getSubscriptions().get(i)
                            .setManagementCertificate(subBackup.get(i).getManagementCertificate());
                }
            }
            if (publishData.getCurrentSubscription() == null
                    && publishData.getPublishProfile().getSubscriptions().size() > 0) {
                publishData.setCurrentSubscription(publishData.getPublishProfile().getSubscriptions().get(0));
            }
            // thread pool size is 3 to load hosted services, locations and storage accounts.
            threadPool = Executors.newScheduledThreadPool(3);
            loadServicesFutures = new ArrayList<Future<?>>();
            // Hosted services
            LoadingHostedServicesTask loadingHostedServicesTask = new LoadingHostedServicesTask(publishData);
            if (listener != null) {
                loadingHostedServicesTask.addLoadingAccountListener(listener);
            }
            loadServicesFutures.add(threadPool.submit(new LoadingTaskRunner(loadingHostedServicesTask)));
            // locations
            LoadingLocationsTask loadingLocationsTask = new LoadingLocationsTask(publishData);
            if (listener != null) {
                loadingLocationsTask.addLoadingAccountListener(listener);
            }
            loadServicesFutures.add(threadPool.submit(new LoadingTaskRunner(loadingLocationsTask)));
            // storage accounts
            LoadingStorageAccountTask loadingStorageAccountTask = new LoadingStorageAccountTask(publishData);
            if (listener != null) {
                loadingStorageAccountTask.addLoadingAccountListener(listener);
            }
            loadServicesFutures.add(threadPool.submit(new LoadingTaskRunner(loadingStorageAccountTask)));
            for (Future<?> future : loadServicesFutures) {
                future.get(operationsTimeoutSeconds, TimeUnit.SECONDS);
            }
            for (Subscription sub : publishData.getPublishProfile().getSubscriptions()) {
                /*
                 * Get collection of storage services in each subscription and
                 * downgrade https endpoints to http (original behavior kept).
                 */
                StorageServices services = publishData.getStoragesPerSubscription().get(sub.getId());
                if (services != null) {
                    for (StorageService strgService : services) {
                        List<URI> endpoints = strgService.getStorageAccountProperties().getEndpoints();
                        if (endpoints.get(0).toString().startsWith("https://")) {
                            endpoints.set(0, URI.create(endpoints.get(0).toString().replaceFirst("https://", "http://")));
                            endpoints.set(1, URI.create(endpoints.get(1).toString().replaceFirst("https://", "http://")));
                            endpoints.set(2, URI.create(endpoints.get(2).toString().replaceFirst("https://", "http://")));
                        }
                    }
                }
            }
        } catch (InterruptedException e) {
            if (loadSubscriptionsFuture != null) {
                loadSubscriptionsFuture.cancel(true);
            }
            if (loadServicesFutures != null) {
                for (Future<?> future : loadServicesFutures) {
                    future.cancel(true);
                }
            }
            canceled = true;
            // Restore the interrupt status for callers further up the stack.
            Thread.currentThread().interrupt();
        } catch (ExecutionException e) {
            // Previously swallowed silently; at least record the failure.
            log(message("error"), e);
        } catch (TimeoutException e) {
            // Previously swallowed silently; at least record the failure.
            log(message("error"), e);
        } finally {
            if (subscriptionThreadPool != null) {
                subscriptionThreadPool.shutdown();
            }
            if (threadPool != null) {
                threadPool.shutdown();
            }
        }
    }
    if (publishData.getPublishProfile().getSubscriptions().size() > 0) {
        if (!empty(publishData) && !canceled) {
            removeDuplicateSubscriptions(publishData);
            PUBLISHS.add(publishData);
            publishData.isInitializing().compareAndSet(true, false);
            currentPublishData = publishData;
        }
    }
}

/**
 * Evicts from the cache any subscription id that also appears in the
 * PublishData about to be cached, then drops PublishData entries left with
 * no subscriptions.
 */
private static void removeDuplicateSubscriptions(PublishData publishData) {
    Set<String> subscriptionIdsToRemove = new HashSet<String>();
    List<Subscription> subscriptionsOfPublishDataToCache = publishData.getPublishProfile().getSubscriptions();
    for (Subscription subscriptionOfPublishDataToCache : subscriptionsOfPublishDataToCache) {
        for (PublishData pd : PUBLISHS) {
            for (Subscription existingSubscription : pd.getPublishProfile().getSubscriptions()) {
                if (existingSubscription.getId().equals(subscriptionOfPublishDataToCache.getId())) {
                    subscriptionIdsToRemove.add(existingSubscription.getId());
                }
            }
        }
    }
    for (String subscriptionId : subscriptionIdsToRemove) {
        removeSubscription(subscriptionId);
    }
    // Drop cache entries that lost their last subscription above.
    List<PublishData> emptyPublishDatas = new ArrayList<PublishData>();
    for (PublishData pd : PUBLISHS) {
        if (pd.getPublishProfile().getSubscriptions().isEmpty()) {
            emptyPublishDatas.add(pd);
        }
    }
    for (PublishData emptyData : emptyPublishDatas) {
        PUBLISHS.remove(emptyData);
    }
}

private static boolean empty(PublishData data) {
    return WizardCacheManagerUtilMethods.empty(data);
}

public static StorageService getStorageAccountFromCurrentPublishData(String storageAccountName) {
    return WizardCacheManagerUtilMethods.getStorageAccountFromCurrentPublishData(storageAccountName, currentPublishData);
}

private static String checkSchemaVersionAndReturnUrl() {
    return WizardCacheManagerUtilMethods.checkSchemaVersionAndReturnUrl(currentPublishData);
}

/** @return local path of the imported publish-settings file for the subscription, or null if never imported. */
public static String getPublishSettingsPath(String subscriptionID) {
    return publishSettingsPerSubscriptionMap.get(subscriptionID);
}

public static Map<String, String> getPublishSettingsPerSubscription() {
    return publishSettingsPerSubscriptionMap;
}

public static void addPublishSettingsPerSubscription(Map<String, String> publishSettingsPerSubscription) {
    publishSettingsPerSubscriptionMap.putAll(publishSettingsPerSubscription);
}
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.transport; import org.elasticsearch.Version; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.core.Tuple; import org.elasticsearch.common.settings.AbstractScopedSettings; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; import java.util.stream.Collectors; public class ProxyConnectionStrategyTests extends ESTestCase { private final String clusterAlias = "cluster-alias"; private final String modeKey = RemoteConnectionStrategy.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias).getKey(); private final Settings settings = Settings.builder().put(modeKey, "proxy").build(); private final ConnectionProfile profile = RemoteConnectionStrategy.buildConnectionProfile("cluster", settings); private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); @Override 
public void tearDown() throws Exception { super.tearDown(); ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); } private MockTransportService startTransport(String id, Version version) { return startTransport(id, version, Settings.EMPTY); } public MockTransportService startTransport(final String id, final Version version, final Settings settings) { boolean success = false; final Settings s = Settings.builder() .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), clusterAlias) .put("node.name", id) .put(settings) .build(); MockTransportService newService = MockTransportService.createNewService(s, version, threadPool); try { newService.start(); newService.acceptIncomingRequests(); success = true; return newService; } finally { if (success == false) { newService.close(); } } } public void testProxyStrategyWillOpenExpectedNumberOfConnectionsToAddress() { try (MockTransportService transport1 = startTransport("node1", Version.CURRENT)) { TransportAddress address1 = transport1.boundAddress().publishAddress(); try (MockTransportService localService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool)) { localService.start(); localService.acceptIncomingRequests(); ClusterConnectionManager connectionManager = new ClusterConnectionManager(profile, localService.transport); int numOfConnections = randomIntBetween(4, 8); try (RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy(clusterAlias, localService, remoteConnectionManager, Settings.EMPTY, numOfConnections, address1.toString())) { assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address1))); PlainActionFuture<Void> connectFuture = PlainActionFuture.newFuture(); strategy.connect(connectFuture); connectFuture.actionGet(); assertTrue(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> 
n.getAddress().equals(address1))); assertEquals(numOfConnections, connectionManager.size()); assertTrue(strategy.assertNoRunningConnections()); } } } } // This test has failed once or twice in the past. This is enabled in case it were to fail again. @TestLogging( value = "org.elasticsearch.transport.ClusterConnectionManager:TRACE,org.elasticsearch.transport.ProxyConnectionStrategy:TRACE", reason = "to ensure that connections are logged") public void testProxyStrategyWillOpenNewConnectionsOnDisconnect() throws Exception { try (MockTransportService transport1 = startTransport("node1", Version.CURRENT); MockTransportService transport2 = startTransport("node2", Version.CURRENT)) { TransportAddress address1 = transport1.boundAddress().publishAddress(); TransportAddress address2 = transport2.boundAddress().publishAddress(); try (MockTransportService localService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool)) { localService.start(); localService.acceptIncomingRequests(); ClusterConnectionManager connectionManager = new ClusterConnectionManager(profile, localService.transport); int numOfConnections = randomIntBetween(4, 8); AtomicBoolean useAddress1 = new AtomicBoolean(true); try (RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy(clusterAlias, localService, remoteConnectionManager, Settings.EMPTY, numOfConnections, address1.toString(), alternatingResolver(address1, address2, useAddress1), null)) { assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address1))); assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address2))); PlainActionFuture<Void> connectFuture = PlainActionFuture.newFuture(); strategy.connect(connectFuture); connectFuture.actionGet(); 
assertTrue(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address1))); long initialConnectionsToTransport2 = connectionManager.getAllConnectedNodes().stream() .filter(n -> n.getAddress().equals(address2)) .count(); assertEquals(0, initialConnectionsToTransport2); assertEquals(numOfConnections, connectionManager.size()); assertTrue(strategy.assertNoRunningConnections()); useAddress1.set(false); transport1.close(); assertBusy(() -> { assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address1))); // Connections now pointing to transport2 long finalConnectionsToTransport2 = connectionManager.getAllConnectedNodes().stream() .filter(n -> n.getAddress().equals(address2)) .count(); assertNotEquals(0, finalConnectionsToTransport2); assertEquals(numOfConnections, connectionManager.size()); assertTrue(strategy.assertNoRunningConnections()); }); } } } } public void testConnectFailsWithIncompatibleNodes() { Version incompatibleVersion = Version.CURRENT.minimumCompatibilityVersion().minimumCompatibilityVersion(); try (MockTransportService transport1 = startTransport("incompatible-node", incompatibleVersion)) { TransportAddress address1 = transport1.boundAddress().publishAddress(); try (MockTransportService localService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool)) { localService.start(); localService.acceptIncomingRequests(); ClusterConnectionManager connectionManager = new ClusterConnectionManager(profile, localService.transport); int numOfConnections = randomIntBetween(4, 8); try (RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy(clusterAlias, localService, remoteConnectionManager, Settings.EMPTY, numOfConnections, address1.toString())) { PlainActionFuture<Void> connectFuture = PlainActionFuture.newFuture(); 
strategy.connect(connectFuture); expectThrows(Exception.class, connectFuture::actionGet); assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address1))); assertEquals(0, connectionManager.size()); assertTrue(strategy.assertNoRunningConnections()); } } } } public void testClusterNameValidationPreventConnectingToDifferentClusters() throws Exception { Settings otherSettings = Settings.builder().put("cluster.name", "otherCluster").build(); try (MockTransportService transport1 = startTransport("cluster1", Version.CURRENT); MockTransportService transport2 = startTransport("cluster2", Version.CURRENT, otherSettings)) { TransportAddress address1 = transport1.boundAddress().publishAddress(); TransportAddress address2 = transport2.boundAddress().publishAddress(); try (MockTransportService localService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool)) { localService.start(); localService.acceptIncomingRequests(); ClusterConnectionManager connectionManager = new ClusterConnectionManager(profile, localService.transport); int numOfConnections = randomIntBetween(4, 8); AtomicBoolean useAddress1 = new AtomicBoolean(true); try (RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy(clusterAlias, localService, remoteConnectionManager, Settings.EMPTY, numOfConnections, address1.toString(), alternatingResolver(address1, address2, useAddress1), null)) { assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address1))); assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address2))); PlainActionFuture<Void> connectFuture = PlainActionFuture.newFuture(); strategy.connect(connectFuture); connectFuture.actionGet(); assertTrue(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> 
// NOTE(review): the tokens below complete an assertTrue(...stream().anyMatch(n -> ...))
// call whose beginning lies before this chunk; they are preserved exactly.
n.getAddress().equals(address1)));
assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address2)));
// Switch the resolver away from address1 and close its transport, then wait until the
// strategy has dropped every connection to address1 and opened none to address2.
useAddress1.set(false);
transport1.close();
assertBusy(() -> {
    assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address1)));
    assertTrue(strategy.assertNoRunningConnections());
    long finalConnectionsToTransport2 = connectionManager.getAllConnectedNodes().stream()
        .filter(n -> n.getAddress().equals(address2))
        .count();
    // Connections not pointing to transport2 because the cluster name is different
    assertEquals(0, finalConnectionsToTransport2);
    assertEquals(0, connectionManager.size());
});
}
}
}
}

/**
 * Verifies that the proxy strategy re-resolves the remote address on every connection
 * attempt: the supplier counts down a latch of 2, and closing one established
 * connection forces a reconnect, so the supplier must be invoked at least twice.
 */
public void testProxyStrategyWillResolveAddressesEachConnect() throws Exception {
    try (MockTransportService transport1 = startTransport("seed_node", Version.CURRENT)) {
        TransportAddress address = transport1.boundAddress().publishAddress();
        CountDownLatch multipleResolveLatch = new CountDownLatch(2);
        Supplier<TransportAddress> addressSupplier = () -> {
            multipleResolveLatch.countDown();
            return address;
        };
        try (MockTransportService localService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool)) {
            localService.start();
            localService.acceptIncomingRequests();
            ClusterConnectionManager connectionManager = new ClusterConnectionManager(profile, localService.transport);
            int numOfConnections = randomIntBetween(4, 8);
            try (RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager);
                 ProxyConnectionStrategy strategy = new ProxyConnectionStrategy(clusterAlias, localService, remoteConnectionManager,
                     Settings.EMPTY, numOfConnections, address.toString(), addressSupplier, null)) {
                PlainActionFuture<Void> connectFuture = PlainActionFuture.newFuture();
                strategy.connect(connectFuture);
                connectFuture.actionGet();
                // Closing one connection triggers a second resolve+connect cycle.
                remoteConnectionManager.getAnyRemoteConnection().close();
                assertTrue(multipleResolveLatch.await(30L, TimeUnit.SECONDS));
            }
        }
    }
}

/**
 * A settings snapshot identical to the current configuration must not force a rebuild;
 * changing the proxy address, the socket count, or the SNI server name must.
 */
public void testProxyStrategyWillNeedToBeRebuiltIfNumOfSocketsOrAddressesOrServerNameChange() {
    try (MockTransportService remoteTransport = startTransport("node1", Version.CURRENT)) {
        TransportAddress remoteAddress = remoteTransport.boundAddress().publishAddress();
        try (MockTransportService localService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool)) {
            localService.start();
            localService.acceptIncomingRequests();
            ClusterConnectionManager connectionManager = new ClusterConnectionManager(profile, localService.transport);
            int numOfConnections = randomIntBetween(4, 8);
            try (RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager);
                 ProxyConnectionStrategy strategy = new ProxyConnectionStrategy(clusterAlias, localService, remoteConnectionManager,
                     Settings.EMPTY, numOfConnections, remoteAddress.toString(), "server-name")) {
                PlainActionFuture<Void> connectFuture = PlainActionFuture.newFuture();
                strategy.connect(connectFuture);
                connectFuture.actionGet();
                assertTrue(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(remoteAddress)));
                assertEquals(numOfConnections, connectionManager.size());
                assertTrue(strategy.assertNoRunningConnections());
                Setting<?> modeSetting = RemoteConnectionStrategy.REMOTE_CONNECTION_MODE
                    .getConcreteSettingForNamespace("cluster-alias");
                Setting<?> addressesSetting = ProxyConnectionStrategy.PROXY_ADDRESS
                    .getConcreteSettingForNamespace("cluster-alias");
                Setting<?> socketConnections = ProxyConnectionStrategy.REMOTE_SOCKET_CONNECTIONS
                    .getConcreteSettingForNamespace("cluster-alias");
                Setting<?> serverName = ProxyConnectionStrategy.SERVER_NAME
                    .getConcreteSettingForNamespace("cluster-alias");
                // Same address, sockets, and server name as the live strategy -> no rebuild.
                Settings noChange = Settings.builder()
                    .put(modeSetting.getKey(), "proxy")
                    .put(addressesSetting.getKey(), remoteAddress.toString())
                    .put(socketConnections.getKey(), numOfConnections)
                    .put(serverName.getKey(), "server-name")
                    .build();
                assertFalse(strategy.shouldRebuildConnection(noChange));
                // Omitting socket count / server name falls back to defaults -> rebuild.
                Settings addressesChanged = Settings.builder()
                    .put(modeSetting.getKey(), "proxy")
                    .put(addressesSetting.getKey(), remoteAddress.toString())
                    .build();
                assertTrue(strategy.shouldRebuildConnection(addressesChanged));
                Settings socketsChanged = Settings.builder()
                    .put(modeSetting.getKey(), "proxy")
                    .put(addressesSetting.getKey(), remoteAddress.toString())
                    .put(socketConnections.getKey(), numOfConnections + 1)
                    .build();
                assertTrue(strategy.shouldRebuildConnection(socketsChanged));
                Settings serverNameChange = Settings.builder()
                    .put(modeSetting.getKey(), "proxy")
                    .put(addressesSetting.getKey(), remoteAddress.toString())
                    .put(socketConnections.getKey(), numOfConnections)
                    .put(serverName.getKey(), "server-name2")
                    .build();
                assertTrue(strategy.shouldRebuildConnection(serverNameChange));
            }
        }
    }
}

/**
 * Proxy-mode-only settings must fail cluster-settings validation when the remote
 * connection mode for the namespace is configured as SNIFF.
 */
public void testModeSettingsCannotBeUsedWhenInDifferentMode() {
    List<Tuple<Setting.AffixSetting<?>, String>> restrictedSettings = Arrays.asList(
        new Tuple<>(ProxyConnectionStrategy.PROXY_ADDRESS, "192.168.0.1:8080"),
        new Tuple<>(ProxyConnectionStrategy.REMOTE_SOCKET_CONNECTIONS, "3"));
    RemoteConnectionStrategy.ConnectionStrategy sniff = RemoteConnectionStrategy.ConnectionStrategy.SNIFF;
    String clusterName = "cluster_name";
    Settings settings = Settings.builder()
        .put(RemoteConnectionStrategy.REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterName).getKey(), sniff.name())
        .build();
    Set<Setting<?>> clusterSettings = new HashSet<>();
    clusterSettings.add(RemoteConnectionStrategy.REMOTE_CONNECTION_MODE);
    clusterSettings.addAll(restrictedSettings.stream().map(Tuple::v1).collect(Collectors.toList()));
    AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, clusterSettings);
    // Should validate successfully
    service.validate(settings, true);
    // Each proxy-only setting, combined with SNIFF mode, must be rejected with the
    // exact error message below.
    for (Tuple<Setting.AffixSetting<?>, String> restrictedSetting : restrictedSettings) {
        Setting<?> concreteSetting = restrictedSetting.v1().getConcreteSettingForNamespace(clusterName);
        Settings invalid = Settings.builder().put(settings).put(concreteSetting.getKey(), restrictedSetting.v2()).build();
        IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> service.validate(invalid, true));
        String expected = "Setting \"" + concreteSetting.getKey() + "\" cannot be used with the configured " + "\"cluster.remote.cluster_name.mode\" [required=PROXY, configured=SNIFF]";
        assertEquals(expected, iae.getMessage());
    }
}

/**
 * When a server name is configured, connected nodes must carry it in their
 * "server_name" attribute.
 */
public void testServerNameAttributes() {
    Settings bindSettings = Settings.builder().put(TransportSettings.BIND_HOST.getKey(), "localhost").build();
    try (MockTransportService transport1 = startTransport("node1", Version.CURRENT, bindSettings)) {
        TransportAddress address1 = transport1.boundAddress().publishAddress();
        try (MockTransportService localService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool)) {
            localService.start();
            localService.acceptIncomingRequests();
            String address = "localhost:" + address1.getPort();
            ClusterConnectionManager connectionManager = new ClusterConnectionManager(profile, localService.transport);
            int numOfConnections = randomIntBetween(4, 8);
            try (RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager);
                 ProxyConnectionStrategy strategy = new ProxyConnectionStrategy(clusterAlias, localService, remoteConnectionManager,
                     Settings.EMPTY, numOfConnections, address, "localhost")) {
                assertFalse(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address1)));
                PlainActionFuture<Void> connectFuture = PlainActionFuture.newFuture();
                strategy.connect(connectFuture);
                connectFuture.actionGet();
                assertTrue(connectionManager.getAllConnectedNodes().stream().anyMatch(n -> n.getAddress().equals(address1)));
                assertTrue(strategy.assertNoRunningConnections());
                DiscoveryNode discoveryNode = connectionManager.getAllConnectedNodes().stream().findFirst().get();
                assertEquals("localhost", discoveryNode.getAttributes().get("server_name"));
            }
        }
    }
}

/**
 * Returns a resolver that yields {@code address1} while the flag is true and
 * {@code address2} otherwise; used to simulate DNS results changing between connects.
 */
private Supplier<TransportAddress> alternatingResolver(TransportAddress address1, TransportAddress address2,
        AtomicBoolean useAddress1) {
    return () -> {
        if (useAddress1.get()) {
            return address1;
        } else {
            return address2;
        }
    };
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.optimizer.calcite; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.plan.RelOptSchema; import org.apache.calcite.plan.RelOptTable; import org.apache.calcite.plan.RelOptUtil.InputFinder; import org.apache.calcite.prepare.RelOptTableImpl; import org.apache.calcite.rel.RelCollation; import org.apache.calcite.rel.RelCollationTraitDef; import org.apache.calcite.rel.RelDistribution; import org.apache.calcite.rel.RelFieldCollation; import org.apache.calcite.rel.RelFieldCollation.Direction; import org.apache.calcite.rel.RelFieldCollation.NullDirection; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelReferentialConstraint; import org.apache.calcite.rel.RelReferentialConstraintImpl; import org.apache.calcite.rel.logical.LogicalTableScan; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; 
import org.apache.calcite.rex.RexNode; import org.apache.calcite.schema.ColumnStrategy; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; import org.apache.calcite.util.mapping.IntPair; import org.apache.hadoop.hive.common.TableName; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.Order; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo; import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo.ForeignKeyCol; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.PartitionIterable; import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.metadata.UniqueConstraint; import org.apache.hadoop.hive.ql.metadata.UniqueConstraint.UniqueConstraintCol; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.ExprNodeConverter; import org.apache.hadoop.hive.ql.optimizer.ppr.PartitionPruner; import org.apache.hadoop.hive.ql.parse.ColumnStatsList; import org.apache.hadoop.hive.ql.parse.ParsedQueryTables; import org.apache.hadoop.hive.ql.parse.PrunedPartitionList; import org.apache.hadoop.hive.ql.plan.ColStatistics; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.Statistics; import org.apache.hadoop.hive.ql.plan.Statistics.State; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.hive.ql.stats.StatsUtils; import org.apache.hadoop.hive.ql.util.DirectionUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import 
com.google.common.collect.Lists;
import com.google.common.collect.Sets;

/**
 * Calcite {@link RelOptTable} implementation backed by a Hive {@link Table}.
 *
 * <p>Bridges Hive metastore metadata (columns, partitions, constraints, statistics)
 * into the Calcite planner: it exposes keys and referential constraints derived from
 * PK/UK/FK metadata, lazily prunes partitions, and caches column statistics.</p>
 *
 * <p>NOTE(review): instances hold mutable lazily-populated state
 * ({@code partitionList}, {@code rowCount}, {@code hiveColStatsMap},
 * {@code referentialConstraints}); nothing here is synchronized, so this class is
 * presumably used from a single planner thread — confirm before sharing instances.</p>
 */
public class RelOptHiveTable implements RelOptTable {

  //~ Instance fields --------------------------------------------------------

  // Calcite-side identity: schema, type factory, row type and qualified name.
  private final RelOptSchema schema;
  private final RelDataTypeFactory typeFactory;
  private final RelDataType rowType;
  private final List<String> qualifiedTblName;
  // Dot-joined form of qualifiedTblName, computed once in the constructor.
  private final String name;
  // Hive-side metadata: the table plus its column partitioning split.
  private final Table hiveTblMetadata;
  private final ImmutableList<ColumnInfo> hiveNonPartitionCols;
  private final ImmutableList<ColumnInfo> hivePartitionCols;
  // Lazily filled by updateColStats(); keyed by column index in rowType.
  private final Map<Integer, ColStatistics> hiveColStatsMap;
  // Column-index -> ColumnInfo maps; partition columns are numbered after
  // non-partition columns (see constructor).
  private final ImmutableMap<Integer, ColumnInfo> hiveNonPartitionColsMap;
  private final ImmutableMap<Integer, ColumnInfo> hivePartitionColsMap;
  private final ImmutableList<VirtualColumn> hiveVirtualCols;
  private final int noOfNonVirtualCols;
  // Keys derived from PK + unique constraints; nonNullablekeys is the subset whose
  // columns are all non-nullable.
  private final List<ImmutableBitSet> keys;
  private final List<ImmutableBitSet> nonNullablekeys;
  // Lazily computed in getReferentialConstraints(); the flag distinguishes
  // "not fetched yet" from "fetched and empty".
  private List<RelReferentialConstraint> referentialConstraints;
  private boolean fetchedReferentialConstraints;
  private final HiveConf hiveConf;

  private final Hive db;
  // Caches shared with the parser/planner: parsed tables, pruned partition lists,
  // and column statistics.
  private final ParsedQueryTables tablesCache;
  private final Map<String, PrunedPartitionList> partitionCache;
  private final Map<String, ColumnStatsList> colStatsCache;
  // Global counter of columns for which statistics could not be obtained.
  private final AtomicInteger noColsMissingStats;

  // -1 acts as the "not computed yet" sentinel for the cached row count.
  private double rowCount = -1;
  // Partition list for this scan; null until computePartitionList() runs.
  PrunedPartitionList partitionList;

  protected static final Logger LOG = LoggerFactory.getLogger(RelOptHiveTable.class.getName());

  /**
   * Builds the table wrapper and eagerly derives key bitsets from the Hive PK and
   * unique-constraint metadata (see {@link #generateKeys()}); referential
   * constraints are deferred until first requested.
   */
  public RelOptHiveTable(RelOptSchema calciteSchema, RelDataTypeFactory typeFactory, List<String> qualifiedTblName,
      RelDataType rowType, Table hiveTblMetadata, List<ColumnInfo> hiveNonPartitionCols,
      List<ColumnInfo> hivePartitionCols, List<VirtualColumn> hiveVirtualCols, HiveConf hconf,
      Hive db, ParsedQueryTables tabNameToTabObject, Map<String, PrunedPartitionList> partitionCache,
      Map<String, ColumnStatsList> colStatsCache, AtomicInteger noColsMissingStats) {
    this.schema = calciteSchema;
    this.typeFactory = typeFactory;
    this.qualifiedTblName = ImmutableList.copyOf(qualifiedTblName);
    this.name = this.qualifiedTblName.stream().collect(Collectors.joining("."));
    this.rowType = rowType;
    this.hiveTblMetadata = hiveTblMetadata;
    this.hiveColStatsMap = new HashMap<>();
    this.hiveNonPartitionCols = ImmutableList.copyOf(hiveNonPartitionCols);
    this.hiveNonPartitionColsMap = HiveCalciteUtil.getColInfoMap(hiveNonPartitionCols, 0);
    this.hivePartitionCols = ImmutableList.copyOf(hivePartitionCols);
    // Partition columns are indexed after all non-partition columns.
    this.hivePartitionColsMap = HiveCalciteUtil.getColInfoMap(hivePartitionCols, hiveNonPartitionColsMap.size());
    this.noOfNonVirtualCols = hiveNonPartitionCols.size() + hivePartitionCols.size();
    this.hiveVirtualCols = ImmutableList.copyOf(hiveVirtualCols);
    this.hiveConf = hconf;
    this.db = db;
    this.tablesCache = tabNameToTabObject;
    this.partitionCache = partitionCache;
    this.colStatsCache = colStatsCache;
    this.noColsMissingStats = noColsMissingStats;
    Pair<List<ImmutableBitSet>, List<ImmutableBitSet>> constraintKeys = generateKeys();
    this.keys = constraintKeys.left;
    this.nonNullablekeys = constraintKeys.right;
  }

  //~ Methods ----------------------------------------------------------------

  /** Returns the dot-joined qualified table name. */
  public String getName() {
    return name;
  }

  @Override
  public List<String> getQualifiedName() {
    return qualifiedTblName;
  }

  @Override
  public RelDataType getRowType() {
    return rowType;
  }

  @Override
  public RelOptSchema getRelOptSchema() {
    return schema;
  }

  public RelDataTypeFactory getTypeFactory() {
    return typeFactory;
  }

  /** Not supported: Hive tables are not queryable via linq4j expressions. */
  @Override
  public Expression getExpression(Class clazz) {
    throw new UnsupportedOperationException();
  }

  /** Keys (PK/UK) whose columns are all declared non-nullable. */
  public List<ImmutableBitSet> getNonNullableKeys() {
    return nonNullablekeys;
  }

  /** Not supported: extending a Hive table with extra fields is not implemented. */
  @Override
  public RelOptTable extend(List<RelDataTypeField> extendedFields) {
    throw new UnsupportedOperationException();
  }

  @Override
  public List<ColumnStrategy> getColumnStrategies() {
    return RelOptTableImpl.columnStrategies(this);
  }

  /**
   * Returns a copy of this table whose row type is {@code newRowType}; the
   * column-info lists are re-derived by matching field names against the original
   * schema. Throws if {@code newRowType} mentions a column this table never had.
   */
  public RelOptHiveTable copy(RelDataType newRowType) {
    // 1. Build map of column name to col index of original schema
    //    Assumption: Hive Table can not contain duplicate column names
    Map<String, Integer> nameToColIndxMap = new HashMap<String, Integer>();
    for (RelDataTypeField f : this.rowType.getFieldList()) {
      nameToColIndxMap.put(f.getName(), f.getIndex());
    }

    // 2. Build nonPart/Part/Virtual column info for new RowSchema
    List<ColumnInfo> newHiveNonPartitionCols = new ArrayList<ColumnInfo>();
    List<ColumnInfo> newHivePartitionCols = new ArrayList<ColumnInfo>();
    List<VirtualColumn> newHiveVirtualCols = new ArrayList<VirtualColumn>();
    Map<Integer, VirtualColumn> virtualColInfoMap = HiveCalciteUtil.getVColsMap(this.hiveVirtualCols,
        this.noOfNonVirtualCols);
    Integer originalColIndx;
    ColumnInfo cInfo;
    VirtualColumn vc;
    for (RelDataTypeField f : newRowType.getFieldList()) {
      originalColIndx = nameToColIndxMap.get(f.getName());
      // Classify each new field as non-partition, partition, or virtual column.
      if ((cInfo = hiveNonPartitionColsMap.get(originalColIndx)) != null) {
        newHiveNonPartitionCols.add(new ColumnInfo(cInfo));
      } else if ((cInfo = hivePartitionColsMap.get(originalColIndx)) != null) {
        newHivePartitionCols.add(new ColumnInfo(cInfo));
      } else if ((vc = virtualColInfoMap.get(originalColIndx)) != null) {
        newHiveVirtualCols.add(vc);
      } else {
        throw new RuntimeException("Copy encountered a column not seen in original TS");
      }
    }

    // 3. Build new Table
    return new RelOptHiveTable(this.schema, this.typeFactory, this.qualifiedTblName, newRowType,
        this.hiveTblMetadata, newHiveNonPartitionCols, newHivePartitionCols, newHiveVirtualCols,
        this.hiveConf, this.db, this.tablesCache, this.partitionCache, this.colStatsCache,
        this.noColsMissingStats);
  }

  // Given a key this method returns true if all of the columns in the key are not nullable
  public boolean isNonNullableKey(ImmutableBitSet columns) {
    for (ImmutableBitSet key : nonNullablekeys) {
      if (columns.contains(key)) {
        return true;
      }
    }
    return false;
  }

  /** True iff {@code columns} is a superset of some declared key. */
  @Override
  public boolean isKey(ImmutableBitSet columns) {
    for (ImmutableBitSet key : keys) {
      if (columns.contains(key)) {
        return true;
      }
    }
    return false;
  }

  /** True iff the Hive metadata declares at least one foreign key on this table. */
  public boolean hasReferentialConstraints() {
    ForeignKeyInfo foreignKeyInfo = hiveTblMetadata.getForeignKeyInfo();
    return foreignKeyInfo != null && !foreignKeyInfo.getForeignKeys().isEmpty();
  }

  @Override
  public List<ImmutableBitSet> getKeys() {
    return keys;
  }

  @Override
  public List<RelReferentialConstraint> getReferentialConstraints() {
    // Do a lazy load here. We only want to fetch the constraint tables that
    // are used in the query.
    if (!fetchedReferentialConstraints) {
      referentialConstraints = generateReferentialConstraints();
      fetchedReferentialConstraints = true;
    }
    return referentialConstraints;
  }

  /**
   * Derives key bitsets from the table's primary key and unique constraints.
   * Column names are matched positionally against {@code rowType}; a name that is
   * not found is logged as an error (and its position index ends up past the field
   * list — NOTE(review): the bit is still set in that case, verify downstream).
   *
   * @return pair of (all keys, keys whose columns are all non-nullable)
   */
  private Pair<List<ImmutableBitSet>, List<ImmutableBitSet>> generateKeys() {
    final PrimaryKeyInfo primaryKeyInfo = hiveTblMetadata.getPrimaryKeyInfo();
    final UniqueConstraint uniqueKeyInfo = hiveTblMetadata.getUniqueKeyInfo();
    ImmutableList.Builder<ImmutableBitSet> builder = ImmutableList.builder();
    ImmutableList.Builder<ImmutableBitSet> nonNullbuilder = ImmutableList.builder();
    // First PK
    if (primaryKeyInfo != null && !primaryKeyInfo.getColNames().isEmpty()) {
      ImmutableBitSet.Builder keys = ImmutableBitSet.builder();
      for (String pkColName : primaryKeyInfo.getColNames().values()) {
        int pkPos;
        for (pkPos = 0; pkPos < rowType.getFieldNames().size(); pkPos++) {
          String colName = rowType.getFieldNames().get(pkPos);
          if (pkColName.equals(colName)) {
            break;
          }
        }
        if (pkPos == rowType.getFieldNames().size()) {
          LOG.error("Column for primary key definition " + pkColName + " not found");
        }
        keys.set(pkPos);
      }
      ImmutableBitSet key = keys.build();
      builder.add(key);
      // A PK is non-nullable by definition.
      nonNullbuilder.add(key);
    }
    // Then UKs
    if (uniqueKeyInfo != null && !uniqueKeyInfo.getUniqueConstraints().isEmpty()) {
      for (List<UniqueConstraintCol> ukCols : uniqueKeyInfo.getUniqueConstraints().values()) {
        ImmutableBitSet.Builder keys = ImmutableBitSet.builder();
        boolean isNonNullable = true;
        for (UniqueConstraintCol ukCol : ukCols) {
          int ukPos;
          for (ukPos = 0; ukPos < rowType.getFieldNames().size(); ukPos++) {
            String colName = rowType.getFieldNames().get(ukPos);
            if (ukCol.colName.equals(colName)) {
              if (rowType.getFieldList().get(ukPos).getType().isNullable()) {
                // A single nullable column disqualifies the whole UK from the
                // non-nullable set.
                isNonNullable = false;
              }
              break;
            }
          }
          if (ukPos == rowType.getFieldNames().size()) {
            LOG.error("Column for unique constraint definition " + ukCol.colName + " not found");
          }
          keys.set(ukPos);
        }
        ImmutableBitSet key = keys.build();
        builder.add(key);
        if (isNonNullable) {
          nonNullbuilder.add(key);
        }
      }
    }
    return new Pair<>(builder.build(), nonNullbuilder.build());
  }

  /**
   * Translates Hive FK metadata into Calcite {@link RelReferentialConstraint}s.
   * Parent tables not present in {@code tablesCache} (i.e. not referenced by the
   * current query) are skipped, as are FK columns that cannot be located in either
   * side's schema.
   */
  private List<RelReferentialConstraint> generateReferentialConstraints() {
    final ForeignKeyInfo foreignKeyInfo = hiveTblMetadata.getForeignKeyInfo();
    ImmutableList.Builder<RelReferentialConstraint> builder = ImmutableList.builder();
    if (foreignKeyInfo != null && !foreignKeyInfo.getForeignKeys().isEmpty()) {
      for (List<ForeignKeyCol> fkCols : foreignKeyInfo.getForeignKeys().values()) {
        // All columns of one FK share the same parent table; read it off the first.
        String parentDatabaseName = fkCols.get(0).parentDatabaseName;
        String parentTableName = fkCols.get(0).parentTableName;
        String qualifiedName;
        List<String> parentTableQualifiedName = new ArrayList<>();
        if (parentDatabaseName != null && !parentDatabaseName.isEmpty()) {
          parentTableQualifiedName.add(parentDatabaseName);
          parentTableQualifiedName.add(parentTableName);
          qualifiedName = TableName.getDbTable(
              parentDatabaseName, parentTableName);
        } else {
          parentTableQualifiedName.add(parentTableName);
          qualifiedName = parentTableName;
        }
        Table parentTab = tablesCache.getParsedTable(qualifiedName);
        if (parentTab == null) {
          // Table doesn't exist in the cache, so we don't need to track
          // these referential constraints.
          continue;
        }
        ImmutableList.Builder<IntPair> keys = ImmutableList.builder();
        for (ForeignKeyCol fkCol : fkCols) {
          // Locate the child column in this table's row type...
          int fkPos;
          for (fkPos = 0; fkPos < rowType.getFieldNames().size(); fkPos++) {
            String fkColName = rowType.getFieldNames().get(fkPos);
            if (fkColName.equals(fkCol.childColName)) {
              break;
            }
          }
          // ...and the parent column in the parent table's column list.
          int pkPos;
          for (pkPos = 0; pkPos < parentTab.getAllCols().size(); pkPos++) {
            String pkColName = parentTab.getAllCols().get(pkPos).getName();
            if (pkColName.equals(fkCol.parentColName)) {
              break;
            }
          }
          if (fkPos == rowType.getFieldNames().size()
              || pkPos == parentTab.getAllCols().size()) {
            LOG.error("Column for foreign key definition " + fkCol + " not found");
            continue;
          }
          keys.add(IntPair.of(fkPos, pkPos));
        }
        builder.add(RelReferentialConstraintImpl.of(qualifiedTblName, parentTableQualifiedName,
            keys.build()));
      }
    }
    return builder.build();
  }

  @Override
  public RelNode toRel(ToRelContext context) {
    return new LogicalTableScan(context.getCluster(), this);
  }

  @Override
  public <T> T unwrap(Class<T> arg0) {
    return arg0.isInstance(this) ? arg0.cast(this) : null;
  }

  /**
   * Builds a single {@link RelCollation} from the table's SORTED BY columns.
   * Ascending sort maps to NULLS FIRST, descending to NULLS LAST.
   */
  @Override
  public List<RelCollation> getCollationList() {
    ImmutableList.Builder<RelFieldCollation> collationList = new ImmutableList.Builder<RelFieldCollation>();
    for (Order sortColumn : this.hiveTblMetadata.getSortCols()) {
      for (int i=0; i<this.hiveTblMetadata.getSd().getCols().size(); i++) {
        FieldSchema field = this.hiveTblMetadata.getSd().getCols().get(i);
        if (field.getName().equals(sortColumn.getCol())) {
          Direction direction = DirectionUtils.codeToDirection(sortColumn.getOrder());
          NullDirection nullDirection = sortColumn.getOrder() == DirectionUtils.ASCENDING_CODE
              ? NullDirection.FIRST : NullDirection.LAST;
          collationList.add(new RelFieldCollation(i, direction, nullDirection));
          break;
        }
      }
    }
    return new ImmutableList.Builder<RelCollation>()
        .add(RelCollationTraitDef.INSTANCE.canonize(
            new HiveRelCollation(collationList.build())))
        .build();
  }

  /** Hash distribution over the table's CLUSTERED BY (bucket) column positions. */
  @Override
  public RelDistribution getDistribution() {
    ImmutableList.Builder<Integer> columnPositions = new ImmutableList.Builder<Integer>();
    for (String bucketColumn : this.hiveTblMetadata.getBucketCols()) {
      for (int i=0; i<this.hiveTblMetadata.getSd().getCols().size(); i++) {
        FieldSchema field = this.hiveTblMetadata.getSd().getCols().get(i);
        if (field.getName().equals(bucketColumn)) {
          columnPositions.add(i);
          break;
        }
      }
    }
    return new HiveRelDistribution(RelDistribution.Type.HASH_DISTRIBUTED,
        columnPositions.build());
  }

  /**
   * Cached row count; computed on first call (pruning all partitions if no
   * predicate-driven pruning has happened yet). -1 is the "unset" sentinel.
   */
  @Override
  public double getRowCount() {
    if (rowCount == -1) {
      if (null == partitionList) {
        // we are here either unpartitioned table or partitioned table with no
        // predicates
        computePartitionList(hiveConf, null, new HashSet<Integer>());
      }
      rowCount = StatsUtils.getNumRows(hiveConf, getNonPartColumns(), hiveTblMetadata,
          partitionList, noColsMissingStats);
    }
    return rowCount;
  }

  public Table getHiveTableMD() {
    return hiveTblMetadata;
  }

  /** Comma-joins the given column names for use in log messages. */
  private String getColNamesForLogging(Set<String> colLst) {
    StringBuilder sb = new StringBuilder();
    boolean firstEntry = true;
    for (String colName : colLst) {
      if (firstEntry) {
        sb.append(colName);
        firstEntry = false;
      } else {
        sb.append(", " + colName);
      }
    }
    return sb.toString();
  }

  /**
   * Populates {@link #partitionList} via the partition pruner. A null or
   * partition-free {@code pruneNode} selects all partitions; otherwise the
   * expression is converted to an {@link ExprNodeDesc} and used for pruning.
   * HiveException is wrapped in RuntimeException.
   */
  public void computePartitionList(HiveConf conf, RexNode pruneNode, Set<Integer> partOrVirtualCols) {
    try {
      if (!hiveTblMetadata.isPartitioned() || pruneNode == null
          || InputFinder.bits(pruneNode).length() == 0) {
        // there is no predicate on partitioning column, we need all partitions
        // in this case.
        partitionList = PartitionPruner.prune(hiveTblMetadata, null, conf, getName(),
            partitionCache);
        return;
      }

      // We have valid pruning expressions, only retrieve qualifying partitions
      ExprNodeDesc pruneExpr = pruneNode.accept(new ExprNodeConverter(getName(), getRowType(),
          partOrVirtualCols, getTypeFactory()));

      partitionList = PartitionPruner.prune(hiveTblMetadata, pruneExpr, conf, getName(),
          partitionCache);
    } catch (HiveException he) {
      throw new RuntimeException(he);
    }
  }

  /**
   * Fetches column statistics for the given column indexes into
   * {@link #hiveColStatsMap} (and the shared {@code colStatsCache}), handling
   * unpartitioned and partitioned tables separately. Columns whose stats cannot be
   * obtained (or are only estimated) are collected; depending on
   * {@code allowMissingStats} that either logs a warning or throws.
   */
  private void updateColStats(Set<Integer> projIndxLst, boolean allowMissingStats) {
    List<String> nonPartColNamesThatRqrStats = new ArrayList<String>();
    List<Integer> nonPartColIndxsThatRqrStats = new ArrayList<Integer>();
    List<String> partColNamesThatRqrStats = new ArrayList<String>();
    List<Integer> partColIndxsThatRqrStats = new ArrayList<Integer>();
    Set<String> colNamesFailedStats = new HashSet<String>();

    // 1. Separate required columns to Non Partition and Partition Cols
    ColumnInfo tmp;
    for (Integer pi : projIndxLst) {
      if (hiveColStatsMap.get(pi) == null) {
        if ((tmp = hiveNonPartitionColsMap.get(pi)) != null) {
          nonPartColNamesThatRqrStats.add(tmp.getInternalName());
          nonPartColIndxsThatRqrStats.add(pi);
        } else if ((tmp = hivePartitionColsMap.get(pi)) != null) {
          partColNamesThatRqrStats.add(tmp.getInternalName());
          partColIndxsThatRqrStats.add(pi);
        } else {
          // Index belongs to neither map: metadata is inconsistent, fail hard.
          noColsMissingStats.getAndIncrement();
          String logMsg = "Unable to find Column Index: " + pi + ", in "
              + hiveTblMetadata.getCompleteName();
          LOG.error(logMsg);
          throw new RuntimeException(logMsg);
        }
      }
    }

    if (null == partitionList) {
      // We could be here either because its an unpartitioned table or because
      // there are no pruning predicates on a partitioned table.
      computePartitionList(hiveConf, null, new HashSet<Integer>());
    }

    String partitionListKey = partitionList.getKey().orElse(null);
    ColumnStatsList colStatsCached = colStatsCache.get(partitionListKey);
    if (colStatsCached == null) {
      colStatsCached = new ColumnStatsList();
      colStatsCache.put(partitionListKey, colStatsCached);
    }

    // 2. Obtain Col Stats for Non Partition Cols
    if (nonPartColNamesThatRqrStats.size() > 0) {
      List<ColStatistics> hiveColStats = new ArrayList<ColStatistics>();

      if (!hiveTblMetadata.isPartitioned()) {
        // 2.1 Handle the case for unpartitioned table.
        try {
          Statistics stats = StatsUtils.collectStatistics(hiveConf, null,
              hiveTblMetadata, hiveNonPartitionCols, nonPartColNamesThatRqrStats,
              colStatsCached, nonPartColNamesThatRqrStats, true);
          rowCount = stats.getNumRows();
          for (String c : nonPartColNamesThatRqrStats) {
            ColStatistics cs = stats.getColumnStatisticsFromColName(c);
            if (cs != null) {
              hiveColStats.add(cs);
            }
          }
          colStatsCached.updateState(stats.getColumnStatsState());

          // 2.1.1 Record Column Names that we needed stats for but couldn't
          if (hiveColStats.isEmpty()) {
            colNamesFailedStats.addAll(nonPartColNamesThatRqrStats);
          } else if (hiveColStats.size() != nonPartColNamesThatRqrStats.size()) {
            // Stats came back for only some columns; record the missing ones.
            Set<String> setOfFiledCols = new HashSet<String>(nonPartColNamesThatRqrStats);

            Set<String> setOfObtainedColStats = new HashSet<String>();
            for (ColStatistics cs : hiveColStats) {
              setOfObtainedColStats.add(cs.getColumnName());
            }
            setOfFiledCols.removeAll(setOfObtainedColStats);

            colNamesFailedStats.addAll(setOfFiledCols);
          } else {
            // Column stats in hiveColStats might not be in the same order as the columns in
            // nonPartColNamesThatRqrStats. reorder hiveColStats so we can build hiveColStatsMap
            // using nonPartColIndxsThatRqrStats as below
            Map<String, ColStatistics> columnStatsMap =
                new HashMap<String, ColStatistics>(hiveColStats.size());
            for (ColStatistics cs : hiveColStats) {
              columnStatsMap.put(cs.getColumnName(), cs);
              // even though the stats were estimated we need to warn user that
              // stats are not available
              if(cs.isEstimated()) {
                colNamesFailedStats.add(cs.getColumnName());
              }
            }
            hiveColStats.clear();
            for (String colName : nonPartColNamesThatRqrStats) {
              hiveColStats.add(columnStatsMap.get(colName));
            }
          }
        } catch (HiveException e) {
          String logMsg = "Collecting stats for table: " + hiveTblMetadata.getTableName() + " failed.";
          LOG.error(logMsg, e);
          throw new RuntimeException(logMsg, e);
        }
      } else {
        // 2.2 Obtain col stats for partitioned table.
        try {
          if (partitionList.getNotDeniedPartns().isEmpty()) {
            // no need to make a metastore call
            rowCount = 0;
            hiveColStats = new ArrayList<ColStatistics>();
            for (int i = 0; i < nonPartColNamesThatRqrStats.size(); i++) {
              // add empty stats object for each column
              hiveColStats.add(
                  new ColStatistics(
                      nonPartColNamesThatRqrStats.get(i),
                      hiveNonPartitionColsMap.get(nonPartColIndxsThatRqrStats.get(i)).getTypeName()));
            }
            colNamesFailedStats.clear();
            colStatsCached.updateState(State.COMPLETE);
          } else {
            Statistics stats = StatsUtils.collectStatistics(hiveConf, partitionList,
                hiveTblMetadata, hiveNonPartitionCols, nonPartColNamesThatRqrStats,
                colStatsCached, nonPartColNamesThatRqrStats, true);
            rowCount = stats.getNumRows();
            hiveColStats = new ArrayList<ColStatistics>();
            for (String c : nonPartColNamesThatRqrStats) {
              ColStatistics cs = stats.getColumnStatisticsFromColName(c);
              if (cs != null) {
                hiveColStats.add(cs);
                if(cs.isEstimated()) {
                  colNamesFailedStats.add(c);
                }
              } else {
                colNamesFailedStats.add(c);
              }
            }
            colStatsCached.updateState(stats.getColumnStatsState());
          }
        } catch (HiveException e) {
          String logMsg = "Collecting stats failed.";
          LOG.error(logMsg, e);
          throw new RuntimeException(logMsg, e);
        }
      }

      if (hiveColStats != null && hiveColStats.size() == nonPartColNamesThatRqrStats.size()) {
        for (int i = 0; i < hiveColStats.size(); i++) {
          // the columns in nonPartColIndxsThatRqrStats/nonPartColNamesThatRqrStats/hiveColStats
          // are in same order
          hiveColStatsMap.put(nonPartColIndxsThatRqrStats.get(i), hiveColStats.get(i));
          colStatsCached.put(hiveColStats.get(i).getColumnName(), hiveColStats.get(i));
          if (LOG.isDebugEnabled()) {
            LOG.debug("Stats for column " + hiveColStats.get(i).getColumnName() + " in table "
                + hiveTblMetadata.getTableName() + " stored in cache");
            LOG.debug(hiveColStats.get(i).toString());
          }
        }
      }
    }

    // 3. Obtain Stats for Partition Cols
    if (colNamesFailedStats.isEmpty() && !partColNamesThatRqrStats.isEmpty()) {
      ColStatistics cStats = null;
      for (int i = 0; i < partColNamesThatRqrStats.size(); i++) {
        cStats = StatsUtils.getColStatsForPartCol(hivePartitionColsMap.get(partColIndxsThatRqrStats.get(i)),
            new PartitionIterable(partitionList.getNotDeniedPartns()), hiveConf);
        hiveColStatsMap.put(partColIndxsThatRqrStats.get(i), cStats);
        colStatsCached.put(cStats.getColumnName(), cStats);
        if (LOG.isDebugEnabled()) {
          LOG.debug("Stats for column " + cStats.getColumnName() + " in table "
              + hiveTblMetadata.getTableName() + " stored in cache");
          LOG.debug(cStats.toString());
        }
      }
    }

    // 4. Warn user if we could get stats for required columns
    if (!colNamesFailedStats.isEmpty()) {
      String logMsg = "No Stats for " + hiveTblMetadata.getCompleteName() + ", Columns: "
          + getColNamesForLogging(colNamesFailedStats);
      noColsMissingStats.getAndAdd(colNamesFailedStats.size());
      if (allowMissingStats) {
        LOG.warn(logMsg);
        HiveConf conf = SessionState.getSessionConf();
        if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_SHOW_WARNINGS)) {
          LogHelper console = SessionState.getConsole();
          console.printInfo(logMsg);
        }
      } else {
        LOG.error(logMsg);
        throw new RuntimeException(logMsg);
      }
    }
  }

  public List<ColStatistics> getColStat(List<Integer> projIndxLst) {
    // If we allow estimated stats for the columns, then we shall set the boolean to true,
    // since otherwise we will throw an exception because columns with estimated stats are
    // actually added to the list of columns that do not contain stats.
    return getColStat(projIndxLst,
        HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_STATS_ESTIMATE_STATS));
  }

  /** Note: DOES NOT CHECK txn stats. */
  public List<ColStatistics> getColStat(List<Integer> projIndxLst, boolean allowMissingStats) {
    List<ColStatistics> colStatsBldr = Lists.newArrayList();
    Set<Integer> projIndxSet = new HashSet<>(projIndxLst);
    // Serve what we can from the in-memory map; virtual columns (index >=
    // noOfNonVirtualCols) never have stats and are dropped silently.
    for (Integer i : projIndxLst) {
      if (i >= noOfNonVirtualCols) {
        projIndxSet.remove(i);
      } else if (hiveColStatsMap.get(i) != null) {
        colStatsBldr.add(hiveColStatsMap.get(i));
        projIndxSet.remove(i);
      }
    }
    if (!projIndxSet.isEmpty()) {
      // NOTE(review): the message says "projIndxSet" but logs projIndxLst — confirm intent.
      LOG.info("Calculating column statistics for {}, projIndxSet: {}, allowMissingStats: {}",
          name, projIndxLst, allowMissingStats);
      updateColStats(projIndxSet, allowMissingStats);
      for (Integer i : projIndxSet) {
        colStatsBldr.add(hiveColStatsMap.get(i));
      }
    }

    return colStatsBldr;
  }

  /*
   * use to check if a set of columns are all partition columns. true only if: -
   * all columns in BitSet are partition columns.
   */
  public boolean containsPartitionColumnsOnly(ImmutableBitSet cols) {
    // NOTE(review): the update clause "i++, i = cols.nextSetBit(i + 1)" advances past
    // index i+1 before probing, so a set bit at i+1 is never examined; the usual idiom
    // is just "i = cols.nextSetBit(i + 1)" — verify against upstream.
    for (int i = cols.nextSetBit(0); i >= 0; i++, i = cols.nextSetBit(i + 1)) {
      if (!hivePartitionColsMap.containsKey(i)) {
        return false;
      }
    }
    return true;
  }

  public List<VirtualColumn> getVirtualCols() {
    return this.hiveVirtualCols;
  }

  public List<ColumnInfo> getPartColumns() {
    return this.hivePartitionCols;
  }

  public List<ColumnInfo> getNonPartColumns() {
    return this.hiveNonPartitionCols;
  }

  public int getNoOfNonVirtualCols() {
    return noOfNonVirtualCols;
  }

  public Map<Integer, ColumnInfo> getPartColInfoMap() {
    return hivePartitionColsMap;
  }

  public Map<Integer, ColumnInfo> getNonPartColInfoMap() {
    return hiveNonPartitionColsMap;
  }

  // Equality is based on row type plus the underlying Hive table metadata;
  // hashCode (below) uses only the table metadata, which is consistent with this.
  @Override
  public boolean equals(Object obj) {
    return obj instanceof RelOptHiveTable
        && this.rowType.equals(((RelOptHiveTable) obj).getRowType())
        && this.getHiveTableMD().equals(((RelOptHiveTable) obj).getHiveTableMD());
  }

  @Override
  public int hashCode() {
    return (this.getHiveTableMD() == null)
        ? super.hashCode() : this.getHiveTableMD().hashCode();
  }

  /** Cache key of the current pruned partition list, or null when none/absent. */
  public String getPartitionListKey() {
    return partitionList != null ? partitionList.getKey().orElse(null) : null;
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.Socket; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.TreeSet; import java.util.concurrent.ThreadLocalRandom; import org.apache.commons.io.IOUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.hdfs.BlockReader; import org.apache.hadoop.hdfs.BlockReaderFactory; import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.RemotePeerFactory; import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys; import 
org.apache.hadoop.hdfs.net.Peer; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DatanodeInfoWithStorage; import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus; import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataEncryptionKeyFactory; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoStriped; import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager; import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicies; import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementStatus; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo; import org.apache.hadoop.hdfs.server.blockmanagement.NumberReplicas; import org.apache.hadoop.hdfs.server.datanode.CachingStrategy; import org.apache.hadoop.hdfs.util.LightWeightHashSet; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.net.NodeBase; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.tracing.TraceUtils; import org.apache.hadoop.util.Time; import org.apache.htrace.core.Tracer; import 
com.google.common.annotations.VisibleForTesting;

/**
 * This class provides rudimentary checking of DFS volumes for errors and
 * sub-optimal conditions.
 * <p>The tool scans all files and directories, starting from an indicated
 * root path. The following abnormal conditions are detected and handled:</p>
 * <ul>
 * <li>files with blocks that are completely missing from all datanodes.<br/>
 * In this case the tool can perform one of the following actions:
 * <ul>
 * <li>none ({@link #FIXING_NONE})</li>
 * <li>move corrupted files to the /lost+found directory on DFS
 * ({@link #FIXING_MOVE}). Remaining data blocks are saved as block
 * chains, each representing the longest consecutive series of valid blocks.</li>
 * <li>delete corrupted files ({@link #FIXING_DELETE})</li>
 * </ul>
 * </li>
 * <li>detect files with under-replicated or over-replicated blocks</li>
 * </ul>
 * Additionally, the tool collects detailed overall DFS statistics, and
 * optionally can print detailed statistics on block locations and replication
 * factors of each file.
*/
@InterfaceAudience.Private
public class NamenodeFsck implements DataEncryptionKeyFactory {
  // Logs under the NameNode logger name so fsck events land in the NN log.
  public static final Log LOG = LogFactory.getLog(NameNode.class.getName());

  // Return strings marking fsck status; DFSck scans the report for these,
  // so they must not change (see the comment near the end of fsck()).
  public static final String CORRUPT_STATUS = "is CORRUPT";
  public static final String HEALTHY_STATUS = "is HEALTHY";
  public static final String DECOMMISSIONING_STATUS = "is DECOMMISSIONING";
  public static final String DECOMMISSIONED_STATUS = "is DECOMMISSIONED";
  public static final String NONEXISTENT_STATUS = "does not exist";
  public static final String FAILURE_STATUS = "FAILED";

  private final NameNode namenode;
  private final BlockManager blockManager;
  private final NetworkTopology networktopology;
  private final int totalDatanodes;
  private final InetAddress remoteAddress;

  // Running totals accumulated while walking the namespace.
  private long totalDirs = 0L;
  private long totalSymlinks = 0L;

  // Lazily-initialized /lost+found state; see lostFoundInit().
  private String lostFound = null;
  private boolean lfInited = false;
  private boolean lfInitedOk = false;

  // Output options, parsed from the servlet parameter map in the constructor.
  private boolean showFiles = false;
  private boolean showOpenFiles = false;
  private boolean showBlocks = false;
  private boolean showLocations = false;
  private boolean showRacks = false;
  private boolean showStoragePolcies = false; // sic: field name keeps a historical misspelling
  private boolean showprogress = false;
  private boolean showCorruptFileBlocks = false;
  private boolean showReplicaDetails = false;
  private long staleInterval;
  private Tracer tracer;

  /**
   * True if we encountered an internal error during FSCK, such as not being
   * able to delete a corrupt file.
   */
  private boolean internalError = false;

  /**
   * True if the user specified the -move option.
   *
   * When this option is in effect, we will copy salvaged blocks into the lost
   * and found.
   */
  private boolean doMove = false;

  /**
   * True if the user specified the -delete option.
   *
   * When this option is in effect, we will delete corrupted files.
*/
  private boolean doDelete = false;

  // Root path the check starts from; "/" unless the "path" parameter is given.
  String path = "/";
  // Space-separated block IDs to check instead of a path scan, or null.
  private String blockIds = null;

  // We return back N files that are corrupt; the list of files returned is
  // ordered by block id; to allow continuation support, pass in the last block
  // # from previous call
  private final String[] currentCookie = new String[] { null };

  private final Configuration conf;
  private final PrintWriter out;
  // Non-null only when "includeSnapshots" was requested; filled in fsck().
  private List<String> snapshottableDirs = null;
  private final BlockPlacementPolicies bpPolicies;
  private StoragePolicySummary storageTypeSummary = null;

  /**
   * Filesystem checker.
   * @param conf configuration (namenode config)
   * @param namenode namenode that this fsck is going to use
   * @param networktopology the namenode's datanode topology, used for rack counts
   * @param pmap key=value[] map passed to the http servlet as url parameters
   * @param out output stream to write the fsck output
   * @param totalDatanodes number of live datanodes
   * @param remoteAddress source address of the fsck request
   */
  NamenodeFsck(Configuration conf, NameNode namenode,
      NetworkTopology networktopology,
      Map<String,String[]> pmap, PrintWriter out,
      int totalDatanodes, InetAddress remoteAddress) {
    this.conf = conf;
    this.namenode = namenode;
    this.blockManager = namenode.getNamesystem().getBlockManager();
    this.networktopology = networktopology;
    this.out = out;
    this.totalDatanodes = totalDatanodes;
    this.remoteAddress = remoteAddress;
    this.bpPolicies = new BlockPlacementPolicies(conf, null,
        networktopology,
        namenode.getNamesystem().getBlockManager().getDatanodeManager()
        .getHost2DatanodeMap());
    this.staleInterval =
        conf.getLong(DFSConfigKeys.DFS_NAMENODE_STALE_DATANODE_INTERVAL_KEY,
          DFSConfigKeys.DFS_NAMENODE_STALE_DATANODE_INTERVAL_DEFAULT);
    this.tracer = new Tracer.Builder("NamenodeFsck").
        conf(TraceUtils.wrapHadoopConf("namenode.fsck.htrace.", conf)).
build();
    // Translate the servlet's url parameters into option flags.
    for (Iterator<String> it = pmap.keySet().iterator(); it.hasNext();) {
      String key = it.next();
      if (key.equals("path")) { this.path = pmap.get("path")[0]; }
      else if (key.equals("move")) { this.doMove = true; }
      else if (key.equals("delete")) { this.doDelete = true; }
      else if (key.equals("files")) { this.showFiles = true; }
      else if (key.equals("blocks")) { this.showBlocks = true; }
      else if (key.equals("locations")) { this.showLocations = true; }
      else if (key.equals("racks")) { this.showRacks = true; }
      else if (key.equals("replicadetails")) { this.showReplicaDetails = true; }
      else if (key.equals("storagepolicies")) { this.showStoragePolcies = true; }
      else if (key.equals("showprogress")) { this.showprogress = true; }
      else if (key.equals("openforwrite")) {this.showOpenFiles = true; }
      else if (key.equals("listcorruptfileblocks")) {
        this.showCorruptFileBlocks = true;
      } else if (key.equals("startblockafter")) {
        this.currentCookie[0] = pmap.get("startblockafter")[0];
      } else if (key.equals("includeSnapshots")) {
        this.snapshottableDirs = new ArrayList<String>();
      } else if (key.equals("blockId")) {
        this.blockIds = pmap.get("blockId")[0];
      }
    }
  }

  /**
   * Check block information given a blockId number; prints replica counts and
   * per-datanode replica status for the block to the report stream.
   */
  public void blockIdCK(String blockId) {
    if(blockId == null) {
      out.println("Please provide valid blockId!");
      return;
    }
    try {
      //get blockInfo
      Block block = new Block(Block.getBlockId(blockId));
      //find which file this block belongs to
      BlockInfo blockInfo = blockManager.getStoredBlock(block);
      if(blockInfo == null) {
        out.println("Block "+ blockId +" " + NONEXISTENT_STATUS);
        LOG.warn("Block "+ blockId + " " + NONEXISTENT_STATUS);
        return;
      }
      final INodeFile iNode = namenode.getNamesystem().getBlockCollection(blockInfo);
      NumberReplicas numberReplicas= blockManager.countNodes(blockInfo);
      out.println("Block Id: " + blockId);
      out.println("Block belongs to: "+iNode.getFullPathName());
      out.println("No. of Expected Replica: " +
          blockManager.getExpectedReplicaNum(blockInfo));
      out.println("No. of live Replica: " + numberReplicas.liveReplicas());
      out.println("No. of excess Replica: " + numberReplicas.excessReplicas());
      out.println("No. of stale Replica: " +
          numberReplicas.replicasOnStaleNodes());
      out.println("No. of decommissioned Replica: "
          + numberReplicas.decommissioned());
      out.println("No. of decommissioning Replica: "
          + numberReplicas.decommissioning());
      out.println("No. of corrupted Replica: " +
          numberReplicas.corruptReplicas());
      //record datanodes that have corrupted block replica
      Collection<DatanodeDescriptor> corruptionRecord = null;
      if (blockManager.getCorruptReplicas(block) != null) {
        corruptionRecord = blockManager.getCorruptReplicas(block);
      }

      //report block replicas status on datanodes
      for(int idx = (blockInfo.numNodes()-1); idx >= 0; idx--) {
        DatanodeDescriptor dn = blockInfo.getDatanode(idx);
        out.print("Block replica on datanode/rack: " + dn.getHostName() +
            dn.getNetworkLocation() + " ");
        if (corruptionRecord != null && corruptionRecord.contains(dn)) {
          out.print(CORRUPT_STATUS + "\t ReasonCode: " +
              blockManager.getCorruptReason(block, dn));
        } else if (dn.isDecommissioned() ){
          out.print(DECOMMISSIONED_STATUS);
        } else if (dn.isDecommissionInProgress()) {
          out.print(DECOMMISSIONING_STATUS);
        } else {
          out.print(HEALTHY_STATUS);
        }
        out.print("\n");
      }
    } catch (Exception e){
      // NOTE(review): the exception is logged twice here (errMsg and the
      // generic message below) — looks redundant; confirm before changing.
      String errMsg = "Fsck on blockId '" + blockId;
      LOG.warn(errMsg, e);
      out.println(e.getMessage());
      out.print("\n\n" + errMsg);
      LOG.warn("Error in looking up block", e);
    }
  }

  /**
   * Check files on DFS, starting from the indicated path.
*/
  public void fsck() {
    final long startTime = Time.monotonicNow();
    try {
      // Block-id mode: check only the listed block IDs, then return.
      if(blockIds != null) {
        String[] blocks = blockIds.split(" ");
        StringBuilder sb = new StringBuilder();
        sb.append("FSCK started by " +
            UserGroupInformation.getCurrentUser() + " from " +
            remoteAddress + " at " + new Date());
        out.println(sb);
        sb.append(" for blockIds: \n");
        for (String blk: blocks) {
          if(blk == null || !blk.contains(Block.BLOCK_FILE_PREFIX)) {
            out.println("Incorrect blockId format: " + blk);
            continue;
          }
          out.print("\n");
          blockIdCK(blk);
          sb.append(blk + "\n");
        }
        LOG.info(sb);
        namenode.getNamesystem().logFsckEvent("/", remoteAddress);
        out.flush();
        return;
      }

      String msg = "FSCK started by " +
          UserGroupInformation.getCurrentUser() + " from " +
          remoteAddress + " for path " + path + " at " + new Date();
      LOG.info(msg);
      out.println(msg);
      namenode.getNamesystem().logFsckEvent(path, remoteAddress);

      // Collect snapshottable dirs so checkDir() can descend into .snapshot.
      if (snapshottableDirs != null) {
        SnapshottableDirectoryStatus[] snapshotDirs =
            namenode.getRpcServer().getSnapshottableDirListing();
        if (snapshotDirs != null) {
          for (SnapshottableDirectoryStatus dir : snapshotDirs) {
            snapshottableDirs.add(dir.getFullPath().toString());
          }
        }
      }

      final HdfsFileStatus file = namenode.getRpcServer().getFileInfo(path);
      if (file != null) {
        if (showCorruptFileBlocks) {
          listCorruptFileBlocks();
          return;
        }

        if (this.showStoragePolcies) {
          storageTypeSummary = new StoragePolicySummary(
              namenode.getNamesystem().getBlockManager().getStoragePolicies());
        }

        // Separate result accumulators for replicated and EC files.
        Result replRes = new ReplicationResult(conf);
        Result ecRes = new ErasureCodingResult(conf);

        check(path, file, replRes, ecRes);

        out.print("\nStatus: ");
        out.println(replRes.isHealthy() && ecRes.isHealthy() ?
"HEALTHY" : "CORRUPT");
        out.println(" Number of data-nodes:\t" + totalDatanodes);
        out.println(" Number of racks:\t\t" + networktopology.getNumOfRacks());
        out.println(" Total dirs:\t\t\t" + totalDirs);
        out.println(" Total symlinks:\t\t" + totalSymlinks);
        out.println("\nReplicated Blocks:");
        out.println(replRes);
        out.println("\nErasure Coded Block Groups:");
        out.println(ecRes);

        if (this.showStoragePolcies) {
          out.print(storageTypeSummary);
        }

        out.println("FSCK ended at " + new Date() + " in "
            + (Time.monotonicNow() - startTime + " milliseconds"));

        // If there were internal errors during the fsck operation, we want to
        // return FAILURE_STATUS, even if those errors were not immediately
        // fatal.  Otherwise many unit tests will pass even when there are bugs.
        if (internalError) {
          throw new IOException("fsck encountered internal errors!");
        }

        // DFSck client scans for the string HEALTHY/CORRUPT to check the status
        // of file system and return appropriate code. Changing the output
        // string might break testcases. Also note this must be the last line
        // of the report.
if (replRes.isHealthy() && ecRes.isHealthy()) {
          out.print("\n\nThe filesystem under path '" + path + "' " + HEALTHY_STATUS);
        } else {
          out.print("\n\nThe filesystem under path '" + path + "' " + CORRUPT_STATUS);
        }

      } else {
        out.print("\n\nPath '" + path + "' " + NONEXISTENT_STATUS);
      }
    } catch (Exception e) {
      String errMsg = "Fsck on path '" + path + "' " + FAILURE_STATUS;
      LOG.warn(errMsg, e);
      out.println("FSCK ended at " + new Date() + " in "
          + (Time.monotonicNow() - startTime + " milliseconds"));
      out.println(e.getMessage());
      out.print("\n\n" + errMsg);
    } finally {
      out.close();
    }
  }

  /**
   * Print corrupt file blocks under the configured path, with a cookie for
   * continuation across repeated calls.
   */
  private void listCorruptFileBlocks() throws IOException {
    final List<String> corrputBlocksFiles = namenode.getNamesystem()
        .listCorruptFileBlocksWithSnapshot(path, snapshottableDirs,
            currentCookie);
    int numCorruptFiles = corrputBlocksFiles.size();
    String filler;
    if (numCorruptFiles > 0) {
      filler = Integer.toString(numCorruptFiles);
      // NOTE(review): currentCookie[0] is assumed non-null below (set by
      // listCorruptFileBlocksWithSnapshot) — confirm; otherwise NPE.
    } else if (currentCookie[0].equals("0")) {
      filler = "no";
    } else {
      filler = "no more";
    }
    out.println("Cookie:\t" + currentCookie[0]);
    for (String s : corrputBlocksFiles) {
      out.println(s);
    }
    out.println("\n\nThe filesystem under path '" + path + "' has " + filler
        + " CORRUPT files");
    out.println();
  }

  /**
   * Recursively check one namespace entry: directories descend via checkDir(),
   * symlinks are counted, files are summarized into replRes/ecRes.
   */
  @VisibleForTesting
  void check(String parent, HdfsFileStatus file, Result replRes, Result ecRes)
      throws IOException {
    String path = file.getFullName(parent);
    if (file.isDir()) {
      checkDir(path, replRes, ecRes);
      return;
    }
    if (file.isSymlink()) {
      if (showFiles) {
        out.println(path + " <symlink>");
      }
      totalSymlinks++;
      return;
    }
    LocatedBlocks blocks = getBlockLocations(path, file);
    if (blocks == null) { // the file is deleted
      return;
    }
    // Route this file's stats to the matching accumulator (EC vs replicated).
    final Result r = file.getErasureCodingPolicy() != null ?
ecRes: replRes;
    collectFileSummary(path, file, r, blocks);
    if (showprogress && (replRes.totalFiles + ecRes.totalFiles) % 100 == 0) {
      out.println();
      out.flush();
    }
    collectBlocksSummary(parent, file, r, blocks);
  }

  /**
   * Check a directory: optionally descend into its .snapshot dir, then page
   * through the listing and check every child.
   */
  private void checkDir(String path, Result replRes, Result ecRes)
      throws IOException {
    if (snapshottableDirs != null && snapshottableDirs.contains(path)) {
      String snapshotPath = (path.endsWith(Path.SEPARATOR) ? path :
          path + Path.SEPARATOR) + HdfsConstants.DOT_SNAPSHOT_DIR;
      HdfsFileStatus snapshotFileInfo = namenode.getRpcServer().getFileInfo(
          snapshotPath);
      check(snapshotPath, snapshotFileInfo, replRes, ecRes);
    }
    byte[] lastReturnedName = HdfsFileStatus.EMPTY_NAME;
    DirectoryListing thisListing;
    if (showFiles) {
      out.println(path + " <dir>");
    }
    totalDirs++;
    do {
      assert lastReturnedName != null;
      // Paged listing: resume after the last name of the previous page.
      thisListing = namenode.getRpcServer().getListing(
          path, lastReturnedName, false);
      if (thisListing == null) {
        return;
      }
      HdfsFileStatus[] files = thisListing.getPartialListing();
      for (int i = 0; i < files.length; i++) {
        check(path, files[i], replRes, ecRes);
      }
      lastReturnedName = thisListing.getLastName();
    } while (thisListing.hasMore());
  }

  /**
   * Fetch block locations for a file directly from the namesystem (under the
   * read lock); returns null if the file was deleted meanwhile.
   */
  private LocatedBlocks getBlockLocations(String path, HdfsFileStatus file)
      throws IOException {
    long fileLen = file.getLen();
    LocatedBlocks blocks = null;
    final FSNamesystem fsn = namenode.getNamesystem();
    fsn.readLock();
    try {
      blocks = FSDirStatAndListingOp.getBlockLocations(
          fsn.getFSDirectory(), fsn.getPermissionChecker(),
          path, 0, fileLen, false)
          .blocks;
    } catch (FileNotFoundException fnfe) {
      blocks = null;
    } finally {
      fsn.readUnlock();
    }
    return blocks;
  }

  /**
   * Accumulate per-file totals into res and emit the per-file header line
   * when the corresponding show* option is set.
   */
  private void collectFileSummary(String path, HdfsFileStatus file, Result res,
      LocatedBlocks blocks) throws IOException {
    long fileLen = file.getLen();
    boolean isOpen = blocks.isUnderConstruction();
    if (isOpen && !showOpenFiles) {
      // We collect these stats about open files to report with default options
      res.totalOpenFilesSize += fileLen;
      res.totalOpenFilesBlocks +=
blocks.locatedBlockCount();
      res.totalOpenFiles++;
      return;
    }
    res.totalFiles++;
    res.totalSize += fileLen;
    res.totalBlocks += blocks.locatedBlockCount();
    if (showOpenFiles && isOpen) {
      out.print(path + " " + fileLen + " bytes, " +
        blocks.locatedBlockCount() + " block(s), OPENFORWRITE: ");
    } else if (showFiles) {
      out.print(path + " " + fileLen + " bytes, " +
        blocks.locatedBlockCount() + " block(s): ");
    } else if (showprogress) {
      // One dot per file as a lightweight progress indicator.
      out.print('.');
    }
  }

  /**
   * Display info of each replica for replication block.
   * For striped block group, display info of each internal block.
   */
  private String getReplicaInfo(BlockInfo storedBlock) {
    if (!(showLocations || showRacks || showReplicaDetails)) {
      return "";
    }
    final boolean isComplete = storedBlock.isComplete();
    // For an incomplete block, report the expected (not actual) locations.
    DatanodeStorageInfo[] storages = isComplete ?
        blockManager.getStorages(storedBlock) :
        storedBlock.getUnderConstructionFeature().getExpectedStorageLocations();
    StringBuilder sb = new StringBuilder(" [");

    for (int i = 0; i < storages.length; i++) {
      DatanodeStorageInfo storage = storages[i];
      DatanodeDescriptor dnDesc = storage.getDatanodeDescriptor();
      if (showRacks) {
        sb.append(NodeBase.getPath(dnDesc));
      } else {
        sb.append(new DatanodeInfoWithStorage(dnDesc, storage.getStorageID(),
            storage.getStorageType()));
      }
      if (showReplicaDetails) {
        LightWeightHashSet<BlockInfo> blocksExcess =
            blockManager.excessReplicateMap.get(dnDesc.getDatanodeUuid());
        Collection<DatanodeDescriptor> corruptReplicas =
            blockManager.getCorruptReplicas(storedBlock);
        sb.append("(");
        // Ordered classification: first matching state wins.
        if (dnDesc.isDecommissioned()) {
          sb.append("DECOMMISSIONED)");
        } else if (dnDesc.isDecommissionInProgress()) {
          sb.append("DECOMMISSIONING)");
        } else if (corruptReplicas != null && corruptReplicas.contains(dnDesc)) {
          sb.append("CORRUPT)");
        } else if (blocksExcess != null && blocksExcess.contains(storedBlock)) {
          sb.append("EXCESS)");
        } else if (dnDesc.isStale(this.staleInterval)) {
          sb.append("STALE_NODE)");
        } else if (storage.areBlockContentsStale()) {
sb.append("STALE_BLOCK_CONTENT)");
        } else {
          sb.append("LIVE)");
        }
      }
      if (i < storages.length - 1) {
        sb.append(", ");
      }
    }
    sb.append(']');
    return sb.toString();
  }

  /**
   * Walk every located block of one file, classifying each block
   * (corrupt / missing / under- / over- / mis-replicated) into res and
   * building the per-file block report.
   */
  private void collectBlocksSummary(String parent, HdfsFileStatus file,
      Result res, LocatedBlocks blocks) throws IOException {
    String path = file.getFullName(parent);
    boolean isOpen = blocks.isUnderConstruction();
    if (isOpen && !showOpenFiles) {
      return;
    }
    int missing = 0;
    int corrupt = 0;
    long missize = 0;
    long corruptSize = 0;
    int underReplicatedPerFile = 0;
    int misReplicatedPerFile = 0;
    StringBuilder report = new StringBuilder();
    int blockNumber = 0;
    final LocatedBlock lastBlock = blocks.getLastLocatedBlock();
    for (LocatedBlock lBlk : blocks.getLocatedBlocks()) {
      ExtendedBlock block = lBlk.getBlock();
      if (!blocks.isLastBlockComplete() && lastBlock != null &&
          lastBlock.getBlock().equals(block)) {
        // this is the last block and this is not complete. ignore it since
        // it is under construction
        continue;
      }

      final BlockInfo storedBlock = blockManager.getStoredBlock(
          block.getLocalBlock());
      final int minReplication = blockManager.getMinStorageNum(storedBlock);

      // count decommissionedReplicas / decommissioningReplicas
      NumberReplicas numberReplicas = blockManager.countNodes(storedBlock);
      int decommissionedReplicas = numberReplicas.decommissioned();
      int decommissioningReplicas = numberReplicas.decommissioning();
      res.decommissionedReplicas += decommissionedReplicas;
      res.decommissioningReplicas += decommissioningReplicas;

      // count total replicas
      int liveReplicas = numberReplicas.liveReplicas();
      int totalReplicasPerBlock = liveReplicas + decommissionedReplicas +
          decommissioningReplicas;
      res.totalReplicas += totalReplicasPerBlock;

      // count expected replicas
      short targetFileReplication;
      if (file.getErasureCodingPolicy() != null) {
        assert storedBlock instanceof BlockInfoStriped;
        targetFileReplication = ((BlockInfoStriped) storedBlock)
            .getRealTotalBlockNum();
      } else {
        targetFileReplication = file.getReplication();
      }
res.numExpectedReplicas += targetFileReplication;

      // count under min repl'd blocks
      if(totalReplicasPerBlock < minReplication){
        res.numUnderMinReplicatedBlocks++;
      }

      // count excessive Replicas / over replicated blocks
      if (liveReplicas > targetFileReplication) {
        res.excessiveReplicas += (liveReplicas - targetFileReplication);
        res.numOverReplicatedBlocks += 1;
      }

      // count corrupt blocks
      boolean isCorrupt = lBlk.isCorrupt();
      if (isCorrupt) {
        res.addCorrupt(block.getNumBytes());
        corrupt++;
        corruptSize += block.getNumBytes();
        out.print("\n" + path + ": CORRUPT blockpool " +
            block.getBlockPoolId() + " block " + block.getBlockName() + "\n");
      }

      // count minimally replicated blocks
      if (totalReplicasPerBlock >= minReplication)
        res.numMinReplicatedBlocks++;

      // count missing replicas / under replicated blocks
      if (totalReplicasPerBlock < targetFileReplication &&
          totalReplicasPerBlock > 0) {
        res.missingReplicas += (targetFileReplication - totalReplicasPerBlock);
        res.numUnderReplicatedBlocks += 1;
        underReplicatedPerFile++;
        if (!showFiles) {
          out.print("\n" + path + ": ");
        }
        out.println(" Under replicated " + block + ". Target Replicas is " +
            targetFileReplication + " but found " + liveReplicas +
            " live replica(s), " + decommissionedReplicas +
            " decommissioned replica(s) and " + decommissioningReplicas +
            " decommissioning replica(s).");
      }

      // count mis replicated blocks
      BlockPlacementStatus blockPlacementStatus = bpPolicies.getPolicy(
          lBlk.isStriped()).verifyBlockPlacement(lBlk.getLocations(),
              targetFileReplication);
      if (!blockPlacementStatus.isPlacementPolicySatisfied()) {
        res.numMisReplicatedBlocks++;
        misReplicatedPerFile++;
        if (!showFiles) {
          if(underReplicatedPerFile == 0)
            out.println();
          out.print(path + ": ");
        }
        out.println(" Replica placement policy is violated for " + block +
            ". " + blockPlacementStatus.getErrorDescription());
      }

      // count storage summary
      if (this.showStoragePolcies && lBlk.getStorageTypes() != null) {
        countStorageTypeSummary(file, lBlk);
      }

      // report
      String blkName = block.toString();
      report.append(blockNumber + ". " + blkName + " len=" +
          block.getNumBytes());
      if (totalReplicasPerBlock == 0 && !isCorrupt) {
        // If the block is corrupted, it means all its available replicas are
        // corrupted. We don't mark it as missing given these available replicas
        // might still be accessible as the block might be incorrectly marked as
        // corrupted by client machines.
        report.append(" MISSING!");
        res.addMissing(blkName, block.getNumBytes());
        missing++;
        missize += block.getNumBytes();
      } else {
        report.append(" Live_repl=" + liveReplicas);
        String info = getReplicaInfo(storedBlock);
        if (!info.isEmpty()){
          report.append(" ").append(info);
        }
      }
      report.append('\n');
      blockNumber++;
    }

    //display under construction block info.
    if (!blocks.isLastBlockComplete() && lastBlock != null) {
      ExtendedBlock block = lastBlock.getBlock();
      String blkName = block.toString();
      BlockInfo storedBlock = blockManager.getStoredBlock(
          block.getLocalBlock());
      DatanodeStorageInfo[] storages = storedBlock
          .getUnderConstructionFeature().getExpectedStorageLocations();
      report.append('\n');
      report.append("Under Construction Block:\n");
      report.append(blockNumber).append(". ").append(blkName);
      report.append(" len=").append(block.getNumBytes());
      report.append(" Expected_repl=" + storages.length);
      String info=getReplicaInfo(storedBlock);
      if (!info.isEmpty()){
        report.append(" ").append(info);
      }
    }

    // count corrupt file & move or delete if necessary
    if ((missing > 0) || (corrupt > 0)) {
      if (!showFiles) {
        if (missing > 0) {
          out.print("\n" + path + ": MISSING " + missing
              + " blocks of total size " + missize + " B.");
        }
        if (corrupt > 0) {
          out.print("\n" + path + ": CORRUPT " + corrupt
              + " blocks of total size " + corruptSize + " B.");
        }
      }
      res.corruptFiles++;
      if (isOpen) {
        LOG.info("Fsck: ignoring open file " + path);
      } else {
        if (doMove) copyBlocksToLostFound(parent, file, blocks);
        if (doDelete) deleteCorruptedFile(path);
      }
    }

    if (showFiles) {
      if (missing > 0 || corrupt > 0) {
        if (missing > 0) {
          out.print(" MISSING " + missing + " blocks of total size " +
              missize + " B\n");
        }
        if (corrupt > 0) {
          out.print(" CORRUPT " + corrupt + " blocks of total size " +
              corruptSize + " B\n");
        }
      } else if (underReplicatedPerFile == 0 && misReplicatedPerFile == 0) {
        out.print(" OK\n");
      }
      if (showBlocks) {
        out.print(report + "\n");
      }
    }
  }

  /** Fold one block's storage types into the storage-policy summary. */
  private void countStorageTypeSummary(HdfsFileStatus file, LocatedBlock lBlk) {
    StorageType[] storageTypes = lBlk.getStorageTypes();
    storageTypeSummary.add(Arrays.copyOf(storageTypes, storageTypes.length),
        namenode.getNamesystem().getBlockManager()
        .getStoragePolicy(file.getStoragePolicy()));
  }

  /** Delete a corrupted file via the RPC server; failures set internalError. */
  private void deleteCorruptedFile(String path) {
    try {
      namenode.getRpcServer().delete(path, true);
      LOG.info("Fsck: deleted corrupt file " + path);
    } catch (Exception e) {
      LOG.error("Fsck: error deleting corrupted file " + path, e);
      internalError = true;
    }
  }

  /** @return true if path exists in the namespace. */
  boolean hdfsPathExists(String path)
      throws AccessControlException, UnresolvedLinkException, IOException {
    try {
      HdfsFileStatus hfs = namenode.getRpcServer().getFileInfo(path);
      return (hfs != null);
    } catch (FileNotFoundException e) {
      return false;
    }
  }

  private void
copyBlocksToLostFound(String parent, HdfsFileStatus file,
      LocatedBlocks blocks) throws IOException {
    // Salvage the readable blocks of a corrupt file into /lost+found, writing
    // each consecutive run of readable blocks as a separate "chain" file.
    final DFSClient dfs = new DFSClient(DFSUtilClient.getNNAddress(conf), conf);
    final String fullName = file.getFullName(parent);
    OutputStream fos = null;
    try {
      if (!lfInited) {
        lostFoundInit(dfs);
      }
      if (!lfInitedOk) {
        throw new IOException("failed to initialize lost+found");
      }
      String target = lostFound + fullName;
      if (hdfsPathExists(target)) {
        LOG.warn("Fsck: can't copy the remains of " + fullName + " to " +
          "lost+found, because " + target + " already exists.");
        return;
      }
      if (!namenode.getRpcServer().mkdirs(
          target, file.getPermission(), true)) {
        throw new IOException("failed to create directory " + target);
      }
      // create chains
      int chain = 0;
      boolean copyError = false;
      for (LocatedBlock lBlk : blocks.getLocatedBlocks()) {
        LocatedBlock lblock = lBlk;
        DatanodeInfo[] locs = lblock.getLocations();
        if (locs == null || locs.length == 0) {
          // No replicas at all: close the current chain and start a new one
          // at the next readable block.
          if (fos != null) {
            fos.flush();
            fos.close();
            fos = null;
          }
          continue;
        }
        if (fos == null) {
          fos = dfs.create(target + "/" + chain, true);
          chain++;
        }

        // copy the block. It's a pity it's not abstracted from DFSInputStream ...
        try {
          copyBlock(dfs, lblock, fos);
        } catch (Exception e) {
          LOG.error("Fsck: could not copy block " + lblock.getBlock() +
              " to " + target, e);
          fos.flush();
          fos.close();
          fos = null;
          internalError = true;
          copyError = true;
        }
      }
      if (copyError) {
        LOG.warn("Fsck: there were errors copying the remains of the " +
            "corrupted file " + fullName + " to /lost+found");
      } else {
        LOG.info("Fsck: copied the remains of the corrupted file " +
            fullName + " to /lost+found");
      }
    } catch (Exception e) {
      LOG.error("copyBlocksToLostFound: error processing " + fullName, e);
      internalError = true;
    } finally {
      if (fos != null) fos.close();
      dfs.close();
    }
  }

  /*
   * XXX (ab) Bulk of this method is copied verbatim from {@link DFSClient}, which is
   * bad. Both places should be refactored to provide a method to copy blocks
   * around.
*/
  private void copyBlock(final DFSClient dfs, LocatedBlock lblock,
      OutputStream fos) throws Exception {
    int failures = 0;
    InetSocketAddress targetAddr = null;
    TreeSet<DatanodeInfo> deadNodes = new TreeSet<DatanodeInfo>();
    BlockReader blockReader = null;
    ExtendedBlock block = lblock.getBlock();

    // Keep trying datanodes (skipping known-dead ones) until a reader opens.
    while (blockReader == null) {
      DatanodeInfo chosenNode;

      try {
        chosenNode = bestNode(dfs, lblock.getLocations(), deadNodes);
        targetAddr = NetUtils.createSocketAddr(chosenNode.getXferAddr());
      }  catch (IOException ie) {
        if (failures >= HdfsClientConfigKeys.
            DFS_CLIENT_MAX_BLOCK_ACQUIRE_FAILURES_DEFAULT) {
          throw new IOException("Could not obtain block " + lblock, ie);
        }
        LOG.info("Could not obtain block from any node:  " + ie);
        try {
          Thread.sleep(10000);
        }  catch (InterruptedException iex) {
          // NOTE(review): interrupt status is swallowed here — pre-existing
          // behavior, kept as-is.
        }
        // Retry the full replica set after the backoff.
        deadNodes.clear();
        failures++;
        continue;
      }
      try {
        String file = BlockReaderFactory.getFileName(targetAddr,
            block.getBlockPoolId(), block.getBlockId());
        blockReader = new BlockReaderFactory(dfs.getConf()).
            setFileName(file).
            setBlock(block).
            setBlockToken(lblock.getBlockToken()).
            setStartOffset(0).
            setLength(-1).
            setVerifyChecksum(true).
            setClientName("fsck").
            setDatanodeInfo(chosenNode).
            setInetSocketAddress(targetAddr).
            setCachingStrategy(CachingStrategy.newDropBehind()).
            setClientCacheContext(dfs.getClientContext()).
            setConfiguration(namenode.conf).
            setTracer(tracer).
            setRemotePeerFactory(new RemotePeerFactory() {
              @Override
              public Peer newConnectedPeer(InetSocketAddress addr,
                  Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
                  throws IOException {
                Peer peer = null;
                Socket s = NetUtils.getDefaultSocketFactory(conf).createSocket();
                try {
                  s.connect(addr, HdfsConstants.READ_TIMEOUT);
                  s.setSoTimeout(HdfsConstants.READ_TIMEOUT);
                  peer = DFSUtilClient.peerFromSocketAndKey(
                        dfs.getSaslDataTransferClient(), s, NamenodeFsck.this,
                        blockToken, datanodeId);
                } finally {
                  if (peer == null) {
                    IOUtils.closeQuietly(s);
                  }
                }
                return peer;
              }
            }).
            build();
      }  catch (IOException ex) {
        // Put chosen node into dead list, continue
        LOG.info("Failed to connect to " + targetAddr + ":" + ex);
        deadNodes.add(chosenNode);
      }
    }
    byte[] buf = new byte[1024];
    int cnt = 0;
    boolean success = true;
    long bytesRead = 0;
    try {
      while ((cnt = blockReader.read(buf, 0, buf.length)) > 0) {
        fos.write(buf, 0, cnt);
        bytesRead += cnt;
      }
      // Sanity check: the datanode must return exactly the recorded length.
      if ( bytesRead != block.getNumBytes() ) {
        throw new IOException("Recorded block size is " + block.getNumBytes() +
                              ", but datanode returned " +bytesRead+" bytes");
      }
    } catch (Exception e) {
      LOG.error("Error reading block", e);
      success = false;
    } finally {
      blockReader.close();
    }
    if (!success) {
      throw new Exception("Could not copy block data for " + lblock.getBlock());
    }
  }

  /** Supplies the data-transfer encryption key for SASL peer setup. */
  @Override
  public DataEncryptionKey newDataEncryptionKey() throws IOException {
    return namenode.getRpcServer().getDataEncryptionKey();
  }

  /*
   * XXX (ab) See comment above for copyBlock().
   *
   * Pick the best node from which to stream the data.
   * That's the local one, if available.
*/
  private DatanodeInfo bestNode(DFSClient dfs, DatanodeInfo[] nodes,
      TreeSet<DatanodeInfo> deadNodes) throws IOException {
    if ((nodes == null) ||
        (nodes.length - deadNodes.size() < 1)) {
      throw new IOException("No live nodes contain current block");
    }
    DatanodeInfo chosenNode;
    // Random choice among live nodes; the guard above ensures termination.
    do {
      chosenNode = nodes[ThreadLocalRandom.current().nextInt(nodes.length)];
    } while (deadNodes.contains(chosenNode));
    return chosenNode;
  }

  /**
   * One-time setup of /lost+found: create it if absent, refuse to use it if a
   * regular file occupies the name. Sets lfInited/lfInitedOk/lostFound.
   */
  private void lostFoundInit(DFSClient dfs) {
    lfInited = true;
    try {
      String lfName = "/lost+found";

      final HdfsFileStatus lfStatus = dfs.getFileInfo(lfName);
      if (lfStatus == null) { // not exists
        lfInitedOk = dfs.mkdirs(lfName, null, true);
        lostFound = lfName;
      } else if (!lfStatus.isDir()) { // exists but not a directory
        LOG.warn("Cannot use /lost+found : a regular file with this name exists.");
        lfInitedOk = false;
      }  else { // exists and is a directory
        lostFound = lfName;
        lfInitedOk = true;
      }
    }  catch (Exception e) {
      e.printStackTrace();
      lfInitedOk = false;
    }
    if (lostFound == null) {
      LOG.warn("Cannot initialize /lost+found .");
      lfInitedOk = false;
      internalError = true;
    }
  }

  /**
   * FsckResult of checking, plus overall DFS statistics.
*/
  @VisibleForTesting
  static class Result {
    // IDs of blocks with zero reachable replicas, in encounter order.
    final List<String> missingIds = new ArrayList<String>();
    long missingSize = 0L;
    long corruptFiles = 0L;
    long corruptBlocks = 0L;
    long corruptSize = 0L;
    long excessiveReplicas = 0L;
    long missingReplicas = 0L;
    long decommissionedReplicas = 0L;
    long decommissioningReplicas = 0L;
    long numUnderMinReplicatedBlocks = 0L;
    long numOverReplicatedBlocks = 0L;
    long numUnderReplicatedBlocks = 0L;
    long numMisReplicatedBlocks = 0L;  // blocks that do not satisfy block placement policy
    long numMinReplicatedBlocks = 0L;  // minimally replicatedblocks
    long totalBlocks = 0L;
    long numExpectedReplicas = 0L;
    long totalOpenFilesBlocks = 0L;
    long totalFiles = 0L;
    long totalOpenFiles = 0L;
    long totalSize = 0L;
    long totalOpenFilesSize = 0L;
    long totalReplicas = 0L;

    /**
     * DFS is considered healthy if there are no missing blocks.
     */
    boolean isHealthy() {
      return ((missingIds.size() == 0) && (corruptBlocks == 0));
    }

    /** Add a missing block name, plus its size. */
    void addMissing(String id, long size) {
      missingIds.add(id);
      missingSize += size;
    }

    /** Add a corrupt block. */
    void addCorrupt(long size) {
      corruptBlocks++;
      corruptSize += size;
    }

    /** Return the actual replication factor.
*/ float getReplicationFactor() { if (totalBlocks == 0) return 0.0f; return (float) (totalReplicas) / (float) totalBlocks; } } @VisibleForTesting static class ReplicationResult extends Result { final short replication; final short minReplication; ReplicationResult(Configuration conf) { this.replication = (short)conf.getInt(DFSConfigKeys.DFS_REPLICATION_KEY, DFSConfigKeys.DFS_REPLICATION_DEFAULT); this.minReplication = (short)conf.getInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY, DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_DEFAULT); } @Override public String toString() { StringBuilder res = new StringBuilder(); res.append(" Total size:\t").append(totalSize).append(" B"); if (totalOpenFilesSize != 0) { res.append(" (Total open files size: ").append(totalOpenFilesSize) .append(" B)"); } res.append("\n Total files:\t").append(totalFiles); if (totalOpenFiles != 0) { res.append(" (Files currently being written: ").append(totalOpenFiles) .append(")"); } res.append("\n Total blocks (validated):\t").append(totalBlocks); if (totalBlocks > 0) { res.append(" (avg. 
block size ").append((totalSize / totalBlocks)) .append(" B)"); } if (totalOpenFilesBlocks != 0) { res.append(" (Total open file blocks (not validated): ").append( totalOpenFilesBlocks).append(")"); } if (corruptFiles > 0 || numUnderMinReplicatedBlocks > 0) { res.append("\n ********************************"); if(numUnderMinReplicatedBlocks>0){ res.append("\n UNDER MIN REPL'D BLOCKS:\t").append(numUnderMinReplicatedBlocks); if(totalBlocks>0){ res.append(" (").append( ((float) (numUnderMinReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } res.append("\n ").append(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY + ":\t") .append(minReplication); } if(corruptFiles>0) { res.append( "\n CORRUPT FILES:\t").append(corruptFiles); if (missingSize > 0) { res.append("\n MISSING BLOCKS:\t").append(missingIds.size()).append( "\n MISSING SIZE:\t\t").append(missingSize).append(" B"); } if (corruptBlocks > 0) { res.append("\n CORRUPT BLOCKS: \t").append(corruptBlocks).append( "\n CORRUPT SIZE:\t\t").append(corruptSize).append(" B"); } } res.append("\n ********************************"); } res.append("\n Minimally replicated blocks:\t").append( numMinReplicatedBlocks); if (totalBlocks > 0) { res.append(" (").append( ((float) (numMinReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } res.append("\n Over-replicated blocks:\t") .append(numOverReplicatedBlocks); if (totalBlocks > 0) { res.append(" (").append( ((float) (numOverReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } res.append("\n Under-replicated blocks:\t").append( numUnderReplicatedBlocks); if (totalBlocks > 0) { res.append(" (").append( ((float) (numUnderReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } res.append("\n Mis-replicated blocks:\t\t") .append(numMisReplicatedBlocks); if (totalBlocks > 0) { res.append(" (").append( ((float) (numMisReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } res.append("\n Default replication 
factor:\t").append(replication) .append("\n Average block replication:\t").append( getReplicationFactor()).append("\n Missing blocks:\t\t").append( missingIds.size()).append("\n Corrupt blocks:\t\t").append( corruptBlocks).append("\n Missing replicas:\t\t").append( missingReplicas); if (totalReplicas > 0) { res.append(" (").append( ((float) (missingReplicas * 100) / (float) numExpectedReplicas)).append( " %)"); } if (decommissionedReplicas > 0) { res.append("\n DecommissionedReplicas:\t").append( decommissionedReplicas); } if (decommissioningReplicas > 0) { res.append("\n DecommissioningReplicas:\t").append( decommissioningReplicas); } return res.toString(); } } @VisibleForTesting static class ErasureCodingResult extends Result { final String defaultECPolicy; ErasureCodingResult(Configuration conf) { defaultECPolicy = ErasureCodingPolicyManager.getSystemDefaultPolicy() .getName(); } @Override public String toString() { StringBuilder res = new StringBuilder(); res.append(" Total size:\t").append(totalSize).append(" B"); if (totalOpenFilesSize != 0) { res.append(" (Total open files size: ").append(totalOpenFilesSize) .append(" B)"); } res.append("\n Total files:\t").append(totalFiles); if (totalOpenFiles != 0) { res.append(" (Files currently being written: ").append(totalOpenFiles) .append(")"); } res.append("\n Total block groups (validated):\t").append(totalBlocks); if (totalBlocks > 0) { res.append(" (avg. 
block group size ").append((totalSize / totalBlocks)) .append(" B)"); } if (totalOpenFilesBlocks != 0) { res.append(" (Total open file block groups (not validated): ").append( totalOpenFilesBlocks).append(")"); } if (corruptFiles > 0 || numUnderMinReplicatedBlocks > 0) { res.append("\n ********************************"); if(numUnderMinReplicatedBlocks>0){ res.append("\n UNRECOVERABLE BLOCK GROUPS:\t").append(numUnderMinReplicatedBlocks); if(totalBlocks>0){ res.append(" (").append( ((float) (numUnderMinReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } } if(corruptFiles>0) { res.append( "\n CORRUPT FILES:\t").append(corruptFiles); if (missingSize > 0) { res.append("\n MISSING BLOCK GROUPS:\t").append(missingIds.size()).append( "\n MISSING SIZE:\t\t").append(missingSize).append(" B"); } if (corruptBlocks > 0) { res.append("\n CORRUPT BLOCK GROUPS: \t").append(corruptBlocks).append( "\n CORRUPT SIZE:\t\t").append(corruptSize).append(" B"); } } res.append("\n ********************************"); } res.append("\n Minimally erasure-coded block groups:\t").append( numMinReplicatedBlocks); if (totalBlocks > 0) { res.append(" (").append( ((float) (numMinReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } res.append("\n Over-erasure-coded block groups:\t") .append(numOverReplicatedBlocks); if (totalBlocks > 0) { res.append(" (").append( ((float) (numOverReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } res.append("\n Under-erasure-coded block groups:\t").append( numUnderReplicatedBlocks); if (totalBlocks > 0) { res.append(" (").append( ((float) (numUnderReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } res.append("\n Unsatisfactory placement block groups:\t") .append(numMisReplicatedBlocks); if (totalBlocks > 0) { res.append(" (").append( ((float) (numMisReplicatedBlocks * 100) / (float) totalBlocks)) .append(" %)"); } res.append("\n Default ecPolicy:\t\t").append(defaultECPolicy) .append("\n Average block group 
size:\t").append( getReplicationFactor()).append("\n Missing block groups:\t\t").append( missingIds.size()).append("\n Corrupt block groups:\t\t").append( corruptBlocks).append("\n Missing internal blocks:\t").append( missingReplicas); if (totalReplicas > 0) { res.append(" (").append( ((float) (missingReplicas * 100) / (float) numExpectedReplicas)).append( " %)"); } if (decommissionedReplicas > 0) { res.append("\n Decommissioned internal blocks:\t").append( decommissionedReplicas); } if (decommissioningReplicas > 0) { res.append("\n Decommissioning internal blocks:\t").append( decommissioningReplicas); } return res.toString(); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.concurrent.Callable; import javax.cache.Cache; import javax.cache.CacheException; import javax.cache.processor.EntryProcessor; import javax.cache.processor.MutableEntry; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.cache.CachePeekMode; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.IgniteKernal; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearCacheAdapter; import org.apache.ignite.internal.transactions.IgniteTxHeuristicCheckedException; import org.apache.ignite.internal.util.typedef.PA; import org.apache.ignite.spi.IgniteSpiAdapter; import org.apache.ignite.spi.IgniteSpiException; import org.apache.ignite.spi.indexing.IndexingQueryFilter; import org.apache.ignite.spi.indexing.IndexingSpi; import org.apache.ignite.testframework.GridTestUtils; import org.apache.ignite.transactions.Transaction; import 
org.apache.ignite.transactions.TransactionConcurrency;
import org.apache.ignite.transactions.TransactionHeuristicException;
import org.apache.ignite.transactions.TransactionIsolation;
import org.jetbrains.annotations.Nullable;

import static org.apache.ignite.cache.CacheMode.LOCAL;
import static org.apache.ignite.cache.CacheMode.REPLICATED;

/**
 * Tests that transaction is invalidated in case of {@link IgniteTxHeuristicCheckedException}.
 */
public abstract class IgniteTxExceptionAbstractSelfTest extends GridCacheAbstractSelfTest {
    /** Key type: key is primary on the given node. */
    private static final int PRIMARY = 0;

    /** Key type: key is backup on the given node. */
    private static final int BACKUP = 1;

    /** Key type: the given node is neither primary nor backup for the key. */
    private static final int NOT_PRIMARY_AND_BACKUP = 2;

    /** Last key generated by {@link #keyForNode}; drives cleanup in afterTest(). */
    private static volatile Integer lastKey;

    /** {@inheritDoc} */
    @Override protected int gridCount() {
        return 3;
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        // Failures are injected through the indexing SPI below.
        cfg.setIndexingSpi(new TestIndexingSpi());

        cfg.getTransactionConfiguration().setTxSerializableEnabled(true);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected CacheConfiguration cacheConfiguration(String igniteInstanceName) throws Exception {
        CacheConfiguration ccfg = super.cacheConfiguration(igniteInstanceName);

        // Disable the store so the indexing SPI is the only failure source.
        ccfg.setCacheStoreFactory(null);
        ccfg.setReadThrough(false);
        ccfg.setWriteThrough(false);
        ccfg.setLoadPreviousValue(true);

        ccfg.setIndexedTypes(Integer.class, Integer.class);

        return ccfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        lastKey = 0;
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        TestIndexingSpi.forceFail(false);

        Transaction tx = jcache().unwrap(Ignite.class).transactions().tx();

        if (tx != null) {
            tx.close();

            fail("Cache transaction remained after test completion: " + tx);
        }

        // Remove every key that keyForNode() handed out during the test.
        for (int key = 0; key <= lastKey; key++)
            grid(0).cache(DEFAULT_CACHE_NAME).remove(key);

        assertEquals(0, jcache(0).size(CachePeekMode.ALL));
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        lastKey = 0;
    }

    /**
     * @throws Exception If failed.
     */
    public void testPutNear() throws Exception {
        checkPut(true, keyForNode(grid(0).localNode(), NOT_PRIMARY_AND_BACKUP));

        checkPut(false, keyForNode(grid(0).localNode(), NOT_PRIMARY_AND_BACKUP));
    }

    /**
     * @throws Exception If failed.
     */
    public void testPutPrimary() throws Exception {
        checkPut(true, keyForNode(grid(0).localNode(), PRIMARY));

        checkPut(false, keyForNode(grid(0).localNode(), PRIMARY));
    }

    /**
     * @throws Exception If failed.
     */
    public void testPutBackup() throws Exception {
        checkPut(true, keyForNode(grid(0).localNode(), BACKUP));

        checkPut(false, keyForNode(grid(0).localNode(), BACKUP));
    }

    /**
     * @throws Exception If failed.
     */
    public void testPutAll() throws Exception {
        checkPutAll(true,
            keyForNode(grid(0).localNode(), PRIMARY),
            keyForNode(grid(0).localNode(), PRIMARY),
            keyForNode(grid(0).localNode(), PRIMARY));

        checkPutAll(false,
            keyForNode(grid(0).localNode(), PRIMARY),
            keyForNode(grid(0).localNode(), PRIMARY),
            keyForNode(grid(0).localNode(), PRIMARY));

        if (gridCount() > 1) {
            checkPutAll(true,
                keyForNode(grid(1).localNode(), PRIMARY),
                keyForNode(grid(1).localNode(), PRIMARY),
                keyForNode(grid(1).localNode(), PRIMARY));

            checkPutAll(false,
                keyForNode(grid(1).localNode(), PRIMARY),
                keyForNode(grid(1).localNode(), PRIMARY),
                keyForNode(grid(1).localNode(), PRIMARY));
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testRemoveNear() throws Exception {
        checkRemove(false, keyForNode(grid(0).localNode(), NOT_PRIMARY_AND_BACKUP));

        checkRemove(true, keyForNode(grid(0).localNode(), NOT_PRIMARY_AND_BACKUP));
    }

    /**
     * @throws Exception If failed.
     */
    public void testRemovePrimary() throws Exception {
        checkRemove(false, keyForNode(grid(0).localNode(), PRIMARY));

        checkRemove(true, keyForNode(grid(0).localNode(), PRIMARY));
    }

    /**
     * @throws Exception If failed.
     */
    public void testRemoveBackup() throws Exception {
        checkRemove(false, keyForNode(grid(0).localNode(), BACKUP));

        checkRemove(true, keyForNode(grid(0).localNode(), BACKUP));
    }

    /**
     * @throws Exception If failed.
     */
    public void testTransformNear() throws Exception {
        checkTransform(false, keyForNode(grid(0).localNode(), NOT_PRIMARY_AND_BACKUP));

        checkTransform(true, keyForNode(grid(0).localNode(), NOT_PRIMARY_AND_BACKUP));
    }

    /**
     * @throws Exception If failed.
     */
    public void testTransformPrimary() throws Exception {
        checkTransform(false, keyForNode(grid(0).localNode(), PRIMARY));

        checkTransform(true, keyForNode(grid(0).localNode(), PRIMARY));
    }

    /**
     * @throws Exception If failed.
     */
    public void testTransformBackup() throws Exception {
        checkTransform(false, keyForNode(grid(0).localNode(), BACKUP));

        checkTransform(true, keyForNode(grid(0).localNode(), BACKUP));
    }

    /**
     * @throws Exception If failed.
     */
    public void testPutNearTx() throws Exception {
        for (TransactionConcurrency concurrency : TransactionConcurrency.values()) {
            for (TransactionIsolation isolation : TransactionIsolation.values()) {
                checkPutTx(true, concurrency, isolation,
                    keyForNode(grid(0).localNode(), NOT_PRIMARY_AND_BACKUP));

                checkPutTx(false, concurrency, isolation,
                    keyForNode(grid(0).localNode(), NOT_PRIMARY_AND_BACKUP));
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testPutPrimaryTx() throws Exception {
        for (TransactionConcurrency concurrency : TransactionConcurrency.values()) {
            for (TransactionIsolation isolation : TransactionIsolation.values()) {
                checkPutTx(true, concurrency, isolation,
                    keyForNode(grid(0).localNode(), PRIMARY));

                checkPutTx(false, concurrency, isolation,
                    keyForNode(grid(0).localNode(), PRIMARY));
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testPutBackupTx() throws Exception {
        for (TransactionConcurrency concurrency : TransactionConcurrency.values()) {
            for (TransactionIsolation isolation : TransactionIsolation.values()) {
                checkPutTx(true, concurrency, isolation,
                    keyForNode(grid(0).localNode(), BACKUP));

                checkPutTx(false, concurrency, isolation,
                    keyForNode(grid(0).localNode(), BACKUP));
            }
        }
    }

    /**
     * @throws Exception If failed.
     */
    public void testPutMultipleKeysTx() throws Exception {
        for (TransactionConcurrency concurrency : TransactionConcurrency.values()) {
            for (TransactionIsolation isolation : TransactionIsolation.values()) {
                checkPutTx(true, concurrency, isolation,
                    keyForNode(grid(0).localNode(), PRIMARY),
                    keyForNode(grid(0).localNode(), PRIMARY),
                    keyForNode(grid(0).localNode(), PRIMARY));

                checkPutTx(false, concurrency, isolation,
                    keyForNode(grid(0).localNode(), PRIMARY),
                    keyForNode(grid(0).localNode(), PRIMARY),
                    keyForNode(grid(0).localNode(), PRIMARY));

                if (gridCount() > 1) {
                    checkPutTx(true, concurrency, isolation,
                        keyForNode(grid(1).localNode(), PRIMARY),
                        keyForNode(grid(1).localNode(), PRIMARY),
                        keyForNode(grid(1).localNode(), PRIMARY));

                    checkPutTx(false, concurrency, isolation,
                        keyForNode(grid(1).localNode(), PRIMARY),
                        keyForNode(grid(1).localNode(), PRIMARY),
                        keyForNode(grid(1).localNode(), PRIMARY));
                }
            }
        }
    }

    /**
     * @param putBefore If {@code true} then puts some value before executing failing operation.
     * @param keys Keys.
     * @param concurrency Transaction concurrency.
     * @param isolation Transaction isolation.
     * @throws Exception If failed.
     */
    private void checkPutTx(boolean putBefore, TransactionConcurrency concurrency,
        TransactionIsolation isolation, final Integer... keys) throws Exception {
        assertTrue(keys.length > 0);

        info("Test transaction [concurrency=" + concurrency + ", isolation=" + isolation + ']');

        IgniteCache<Integer, Integer> cache = grid(0).cache(DEFAULT_CACHE_NAME);

        if (putBefore) {
            TestIndexingSpi.forceFail(false);

            info("Start transaction.");

            try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
                for (Integer key : keys) {
                    info("Put " + key);

                    cache.put(key, 1);
                }

                info("Commit.");

                tx.commit();
            }
        }

        // Execute get from all nodes to create readers for near cache.
        for (int i = 0; i < gridCount(); i++) {
            for (Integer key : keys)
                grid(i).cache(DEFAULT_CACHE_NAME).get(key);
        }

        TestIndexingSpi.forceFail(true);

        try {
            info("Start transaction.");

            try (Transaction tx = grid(0).transactions().txStart(concurrency, isolation)) {
                for (Integer key : keys) {
                    info("Put " + key);

                    cache.put(key, 2);
                }

                info("Commit.");

                tx.commit();
            }

            fail("Transaction should fail.");
        }
        catch (TransactionHeuristicException e) {
            log.info("Expected exception: " + e);
        }

        for (Integer key : keys)
            checkUnlocked(key);

        // Fix: check the transaction map on every node (was ignite(0), which
        // left the loop variable unused and only ever verified node 0).
        for (int i = 0; i < gridCount(); i++)
            assertEquals(0,
                ((IgniteKernal)ignite(i)).internalCache(DEFAULT_CACHE_NAME).context().tm().idMapSize());
    }

    /**
     * Waits until the given key is not locked on any node (including DHT
     * entries behind near caches).
     *
     * @param key Key.
     * @throws Exception If failed.
     */
    @SuppressWarnings("unchecked")
    private void checkUnlocked(final Integer key) throws Exception {
        TestIndexingSpi.forceFail(false);

        awaitPartitionMapExchange();

        info("Check key: " + key);

        for (int i = 0; i < gridCount(); i++) {
            final int idx = i;

            GridTestUtils.waitForCondition(new PA() {
                @Override public boolean apply() {
                    IgniteKernal grid = (IgniteKernal)grid(idx);

                    GridCacheAdapter cache = grid.internalCache(DEFAULT_CACHE_NAME);

                    GridCacheEntryEx entry = cache.peekEx(key);

                    log.info("Entry: " + entry);

                    if (entry != null) {
                        try {
                            boolean locked = entry.lockedByAny();

                            if (locked) {
                                info("Unexpected entry for grid [i=" + idx + ", entry=" + entry + ']');

                                return false;
                            }
                        }
                        catch (GridCacheEntryRemovedException ignore) {
                            // Obsolete entry cannot be locked.
                        }
                    }

                    if (cache.isNear()) {
                        entry = ((GridNearCacheAdapter)cache).dht().peekEx(key);

                        log.info("Dht entry: " + entry);

                        if (entry != null) {
                            try {
                                boolean locked = entry.lockedByAny();

                                if (locked) {
                                    info("Unexpected entry for grid [i=" + idx + ", entry=" + entry + ']');

                                    return false;
                                }
                            }
                            catch (GridCacheEntryRemovedException ignore) {
                                // Obsolete entry cannot be locked.
                            }
                        }
                    }

                    return true;
                }
            }, getTestTimeout());
        }
    }

    /**
     * @param putBefore If {@code true} then puts some value before executing failing operation.
     * @param key Key.
     * @throws Exception If failed.
     */
    private void checkPut(boolean putBefore, final Integer key) throws Exception {
        if (putBefore) {
            TestIndexingSpi.forceFail(false);

            info("Put key: " + key);

            grid(0).cache(DEFAULT_CACHE_NAME).put(key, 1);
        }

        // Execute get from all nodes to create readers for near cache.
        for (int i = 0; i < gridCount(); i++)
            grid(i).cache(DEFAULT_CACHE_NAME).get(key);

        TestIndexingSpi.forceFail(true);

        info("Going to put: " + key);

        GridTestUtils.assertThrows(log, new Callable<Void>() {
            @Override public Void call() throws Exception {
                grid(0).cache(DEFAULT_CACHE_NAME).put(key, 2);

                return null;
            }
        }, TransactionHeuristicException.class, null);

        checkUnlocked(key);
    }

    /**
     * @param putBefore If {@code true} then puts some value before executing failing operation.
     * @param key Key.
     * @throws Exception If failed.
     */
    private void checkTransform(boolean putBefore, final Integer key) throws Exception {
        if (putBefore) {
            TestIndexingSpi.forceFail(false);

            info("Put key: " + key);

            grid(0).cache(DEFAULT_CACHE_NAME).put(key, 1);
        }

        // Execute get from all nodes to create readers for near cache.
        for (int i = 0; i < gridCount(); i++)
            grid(i).cache(DEFAULT_CACHE_NAME).get(key);

        TestIndexingSpi.forceFail(true);

        info("Going to transform: " + key);

        // invoke() wraps the heuristic failure in a CacheException; assert the cause.
        Throwable e = GridTestUtils.assertThrows(log, new Callable<Void>() {
            @Override public Void call() throws Exception {
                grid(0).<Integer, Integer>cache(DEFAULT_CACHE_NAME).invoke(key,
                    new EntryProcessor<Integer, Integer, Void>() {
                        @Override public Void process(MutableEntry<Integer, Integer> e, Object... args) {
                            e.setValue(2);

                            return null;
                        }
                    });

                return null;
            }
        }, CacheException.class, null);

        assertTrue("Unexpected cause: " + e, e.getCause() instanceof TransactionHeuristicException);

        checkUnlocked(key);
    }

    /**
     * @param putBefore If {@code true} then puts some value before executing failing operation.
     * @param keys Keys.
     * @throws Exception If failed.
     */
    private void checkPutAll(boolean putBefore, Integer... keys) throws Exception {
        assert keys.length > 1;

        if (putBefore) {
            TestIndexingSpi.forceFail(false);

            Map<Integer, Integer> m = new HashMap<>();

            for (Integer key : keys)
                m.put(key, 1);

            info("Put data: " + m);

            grid(0).cache(DEFAULT_CACHE_NAME).putAll(m);
        }

        // Execute get from all nodes to create readers for near cache.
        for (int i = 0; i < gridCount(); i++) {
            for (Integer key : keys)
                grid(i).cache(DEFAULT_CACHE_NAME).get(key);
        }

        TestIndexingSpi.forceFail(true);

        final Map<Integer, Integer> m = new HashMap<>();

        for (Integer key : keys)
            m.put(key, 2);

        info("Going to putAll: " + m);

        GridTestUtils.assertThrows(log, new Callable<Void>() {
            @Override public Void call() throws Exception {
                grid(0).cache(DEFAULT_CACHE_NAME).putAll(m);

                return null;
            }
        }, TransactionHeuristicException.class, null);

        for (Integer key : m.keySet())
            checkUnlocked(key);
    }

    /**
     * @param putBefore If {@code true} then puts some value before executing failing operation.
     * @param key Key.
     * @throws Exception If failed.
     */
    private void checkRemove(boolean putBefore, final Integer key) throws Exception {
        if (putBefore) {
            TestIndexingSpi.forceFail(false);

            info("Put key: " + key);

            grid(0).cache(DEFAULT_CACHE_NAME).put(key, 1);
        }

        // Execute get from all nodes to create readers for near cache.
        for (int i = 0; i < gridCount(); i++)
            grid(i).cache(DEFAULT_CACHE_NAME).get(key);

        TestIndexingSpi.forceFail(true);

        info("Going to remove: " + key);

        GridTestUtils.assertThrows(log, new Callable<Void>() {
            @Override public Void call() throws Exception {
                grid(0).cache(DEFAULT_CACHE_NAME).remove(key);

                return null;
            }
        }, TransactionHeuristicException.class, null);

        checkUnlocked(key);
    }

    /**
     * Generates key of a given type for given node.
     *
     * @param node Node.
     * @param type Key type.
     * @return Key.
     */
    private Integer keyForNode(ClusterNode node, int type) {
        IgniteCache<Integer, Integer> cache = grid(0).cache(DEFAULT_CACHE_NAME);

        // LOCAL caches have no affinity; any fresh key will do.
        if (cache.getConfiguration(CacheConfiguration.class).getCacheMode() == LOCAL)
            return ++lastKey;

        // REPLICATED caches have no "neither primary nor backup" node.
        if (cache.getConfiguration(CacheConfiguration.class).getCacheMode() == REPLICATED &&
            type == NOT_PRIMARY_AND_BACKUP)
            return ++lastKey;

        // Probe a bounded window of candidate keys against the affinity function.
        for (int key = lastKey + 1; key < (lastKey + 10_000); key++) {
            switch (type) {
                case NOT_PRIMARY_AND_BACKUP: {
                    if (!affinity(cache).isPrimaryOrBackup(node, key)) {
                        lastKey = key;

                        return key;
                    }

                    break;
                }

                case PRIMARY: {
                    if (affinity(cache).isPrimary(node, key)) {
                        lastKey = key;

                        return key;
                    }

                    break;
                }

                case BACKUP: {
                    if (affinity(cache).isBackup(node, key)) {
                        lastKey = key;

                        return key;
                    }

                    break;
                }

                default:
                    fail();
            }
        }

        throw new IllegalStateException("Failed to find key.");
    }

    /**
     * Indexing SPI that can fail on demand.
     */
    private static class TestIndexingSpi extends IgniteSpiAdapter implements IndexingSpi {
        /** Fail flag. */
        private static volatile boolean fail;

        /**
         * @param failFlag Fail flag.
         */
        public static void forceFail(boolean failFlag) {
            fail = failFlag;
        }

        /** {@inheritDoc} */
        @Override public Iterator<Cache.Entry<?, ?>> query(@Nullable String spaceName,
            Collection<Object> params,
            @Nullable IndexingQueryFilter filters) throws IgniteSpiException {
            throw new UnsupportedOperationException();
        }

        /** {@inheritDoc} */
        @Override public void store(@Nullable String spaceName, Object key, Object val, long expirationTime)
            throws IgniteSpiException {
            // One-shot failure: the flag is cleared before throwing.
            if (fail) {
                fail = false;

                throw new IgniteSpiException("Test exception.");
            }
        }

        /** {@inheritDoc} */
        @Override public void remove(@Nullable String spaceName, Object k) throws IgniteSpiException {
            // One-shot failure: the flag is cleared before throwing.
            if (fail) {
                fail = false;

                throw new IgniteSpiException("Test exception.");
            }
        }

        /** {@inheritDoc} */
        @Override public void spiStart(@Nullable String igniteInstanceName) throws IgniteSpiException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void spiStop() throws IgniteSpiException {
            // No-op.
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.consul.policy;

import java.math.BigInteger;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

import com.google.common.base.Optional;
import com.orbitz.consul.Consul;
import com.orbitz.consul.KeyValueClient;
import com.orbitz.consul.SessionClient;
import com.orbitz.consul.async.ConsulResponseCallback;
import com.orbitz.consul.model.ConsulResponse;
import com.orbitz.consul.model.kv.Value;
import com.orbitz.consul.model.session.ImmutableSession;
import com.orbitz.consul.option.QueryOptions;
import org.apache.camel.Exchange;
import org.apache.camel.NonManagedService;
import org.apache.camel.Route;
import org.apache.camel.support.RoutePolicySupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link org.apache.camel.spi.RoutePolicy} that uses a Consul session lock to elect a
 * single leader among competing nodes. Only the elected leader keeps its route consumers
 * running; all other nodes suspend their consumers (and fail any exchange that slips
 * through) until they win the lock.
 * <p>
 * Leadership is tracked by holding a Consul KV lock on {@code /service/<name>/leader}
 * tied to a TTL session; a background {@link Watcher} blocks on the key and reacts to
 * session/ownership changes.
 */
public class ConsulRoutePolicy extends RoutePolicySupport implements NonManagedService {
    private static final Logger LOGGER = LoggerFactory.getLogger(ConsulRoutePolicy.class);

    /** Guards {@link #suspendedRoutes}. */
    private final Object lock;
    private final Consul consul;
    private final SessionClient sessionClient;
    private final KeyValueClient keyValueClient;
    private final AtomicBoolean leader;
    private final Set<Route> suspendedRoutes;
    /** Consul blocking-query index, used to long-poll only for changes newer than the last seen. */
    private final AtomicReference<BigInteger> index;

    private String serviceName;
    private String servicePath;
    private int ttl;
    private int lockDelay;
    private ExecutorService executorService;
    private boolean shouldStopConsumer;
    private String sessionId;

    public ConsulRoutePolicy() {
        this(Consul.builder().build());
    }

    public ConsulRoutePolicy(Consul consul) {
        this.consul = consul;
        this.sessionClient = consul.sessionClient();
        this.keyValueClient = consul.keyValueClient();
        this.suspendedRoutes = new HashSet<>();
        this.leader = new AtomicBoolean(false);
        this.lock = new Object();
        this.index = new AtomicReference<>(BigInteger.valueOf(0));
        this.serviceName = null;
        this.servicePath = null;
        this.ttl = 60;
        this.lockDelay = 10;
        this.executorService = null;
        this.shouldStopConsumer = true;
        this.sessionId = null;
    }

    @Override
    public void onExchangeBegin(Route route, Exchange exchange) {
        if (leader.get()) {
            if (shouldStopConsumer) {
                startConsumer(route);
            }
        } else {
            if (shouldStopConsumer) {
                stopConsumer(route);
            }
            // A non-leader node must not process exchanges at all.
            exchange.setException(new IllegalStateException(
                "Consul based route policy prohibits processing exchanges, stopping route and failing the exchange")
            );
        }
    }

    @Override
    public void onStop(Route route) {
        synchronized (lock) {
            suspendedRoutes.remove(route);
        }
    }

    @Override
    public void onSuspend(Route route) {
        // The explicit lock monitor is sufficient here (matches onStop); the previous
        // 'synchronized' method modifier was redundant.
        synchronized (lock) {
            suspendedRoutes.remove(route);
        }
    }

    @Override
    protected void doStart() throws Exception {
        // NOTE(review): servicePath is only set by setServiceName(); this policy assumes
        // setServiceName() has been called before start — confirm with callers.
        if (sessionId == null) {
            sessionId = sessionClient.createSession(
                ImmutableSession.builder()
                    .name(serviceName)
                    .ttl(ttl + "s")
                    .lockDelay(lockDelay + "s")
                    .build()
            ).getId();

            LOGGER.debug("SessionID = {}", sessionId);

            if (executorService == null) {
                executorService = Executors.newSingleThreadExecutor();
            }

            // Try to grab leadership immediately, then watch the key for changes.
            setLeader(keyValueClient.acquireLock(servicePath, sessionId));
            executorService.submit(new Watcher());
        }

        super.doStart();
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();

        if (sessionId != null) {
            // Destroying the session releases the lock held by this node.
            sessionClient.destroySession(sessionId);
            sessionId = null;

            if (executorService != null) {
                executorService.shutdown();
                executorService.awaitTermination(ttl / 3, TimeUnit.SECONDS);
            }
        }
    }

    // *************************************************************************
    //
    // *************************************************************************

    /**
     * Records the current leadership state and reacts to transitions:
     * on gaining leadership all previously suspended consumers are restarted;
     * on losing it the event is logged (consumers are stopped lazily by
     * {@link #onExchangeBegin}).
     */
    protected void setLeader(boolean isLeader) {
        if (isLeader && leader.compareAndSet(false, true)) {
            LOGGER.debug("Leadership taken ({}, {})", serviceName, sessionId);
            startAllStoppedConsumers();
        } else if (leader.getAndSet(isLeader) && !isLeader) {
            // FIX: was '!leader.getAndSet(isLeader) && isLeader', which could never
            // fire on an actual leadership loss (isLeader == false short-circuits it).
            // Lost leadership == previously true, now false.
            LOGGER.debug("Leadership lost ({}, {})", serviceName, sessionId);
        }
    }

    private void startConsumer(Route route) {
        synchronized (lock) {
            try {
                if (suspendedRoutes.contains(route)) {
                    startConsumer(route.getConsumer());
                    suspendedRoutes.remove(route);
                }
            } catch (Exception e) {
                handleException(e);
            }
        }
    }

    private void stopConsumer(Route route) {
        synchronized (lock) {
            try {
                if (!suspendedRoutes.contains(route)) {
                    LOGGER.debug("Stopping consumer for {} ({})", route.getId(), route.getConsumer());
                    stopConsumer(route.getConsumer());
                    suspendedRoutes.add(route);
                }
            } catch (Exception e) {
                handleException(e);
            }
        }
    }

    private void startAllStoppedConsumers() {
        synchronized (lock) {
            try {
                for (Route route : suspendedRoutes) {
                    LOGGER.debug("Starting consumer for {} ({})", route.getId(), route.getConsumer());
                    startConsumer(route.getConsumer());
                }

                suspendedRoutes.clear();
            } catch (Exception e) {
                handleException(e);
            }
        }
    }

    // *************************************************************************
    // Getter/Setters
    // *************************************************************************

    public Consul getConsul() {
        return consul;
    }

    public String getServiceName() {
        return serviceName;
    }

    /** Sets the service name and derives the KV lock path {@code /service/<name>/leader}. */
    public void setServiceName(String serviceName) {
        this.serviceName = serviceName;
        this.servicePath = String.format("/service/%s/leader", serviceName);
    }

    public int getTtl() {
        return ttl;
    }

    /** Session TTL in seconds; Consul enforces a minimum, so values below 10 are clamped to 10. */
    public void setTtl(int ttl) {
        this.ttl = ttl > 10 ? ttl : 10;
    }

    public int getLockDelay() {
        return lockDelay;
    }

    /** Session lock-delay in seconds; values below 10 are clamped to 10. */
    public void setLockDelay(int lockDelay) {
        this.lockDelay = lockDelay > 10 ? lockDelay : 10;
    }

    public ExecutorService getExecutorService() {
        return executorService;
    }

    public void setExecutorService(ExecutorService executorService) {
        this.executorService = executorService;
    }

    public boolean isShouldStopConsumer() {
        return shouldStopConsumer;
    }

    public void setShouldStopConsumer(boolean shouldStopConsumer) {
        this.shouldStopConsumer = shouldStopConsumer;
    }

    // *************************************************************************
    // Watch
    // *************************************************************************

    /**
     * Long-polls the leader key. Each blocking read (bounded by ttl/3 so the session is
     * renewed well within its TTL) re-enters {@link #onComplete}, which updates the
     * leadership state and re-arms the watch — an async self-perpetuating loop.
     */
    private class Watcher implements Runnable, ConsulResponseCallback<Optional<Value>> {

        @Override
        public void onComplete(ConsulResponse<Optional<Value>> consulResponse) {
            if (isRunAllowed()) {
                Value response = consulResponse.getResponse().orNull();
                if (response != null) {
                    String sid = response.getSession().orNull();
                    if (ObjectHelper.isEmpty(sid)) {
                        // If the key is not held by any session, try acquire a
                        // lock (become leader)
                        LOGGER.debug("Try to take leadership ...");
                        setLeader(keyValueClient.acquireLock(servicePath, sessionId));
                    } else if (!sessionId.equals(sid) && leader.get()) {
                        // Looks like I've lost leadership
                        setLeader(false);
                    }
                }

                index.set(consulResponse.getIndex());
                run();
            }
        }

        @Override
        public void onFailure(Throwable throwable) {
            handleException(throwable);
        }

        @Override
        public void run() {
            if (isRunAllowed()) {
                // Refresh session
                sessionClient.renewSession(sessionId);

                keyValueClient.getValue(
                    servicePath,
                    QueryOptions.blockSeconds(ttl / 3, index.get()).build(),
                    this);
            }
        }
    }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vcs.annotate;

import com.intellij.openapi.localVcs.UpToDateLineNumberProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vcs.VcsKey;
import com.intellij.openapi.vcs.diff.DiffProvider;
import com.intellij.openapi.vcs.history.VcsFileRevision;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.openapi.vcs.vfs.VcsVirtualFile;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.Consumer;
import com.intellij.util.containers.ContainerUtil;
import java.util.HashMap;
import com.intellij.util.text.JBDateFormat;
import consulo.logging.Logger;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.*;

/**
 * Represents annotations ("vcs blame") for some file in a specific revision
 *
 * @see AnnotationProvider
 */
public abstract class FileAnnotation {
  private static final Logger LOG = Logger.getInstance(FileAnnotation.class);

  @Nonnull
  private final Project myProject;

  private Runnable myCloser;
  private Consumer<FileAnnotation> myReloader;

  protected FileAnnotation(@Nonnull Project project) {
    myProject = project;
  }

  @Nonnull
  public Project getProject() {
    return myProject;
  }

  @Nullable
  public VcsKey getVcsKey() {
    return null;
  }

  /**
   * @return annotated file
   * <p>
   * If annotations are called on a local file, it can be this file.
   * If annotations are called on a specific revision, it can be corresponding {@link VcsVirtualFile}.
   * Note: file content might differ from content in annotated revision {@link #getAnnotatedContent}.
   */
  @Nullable
  public VirtualFile getFile() {
    return null;
  }

  /**
   * @return file content in the annotated revision
   * <p>
   * It might differ from {@code getFile()} content. Ex: annotations for a local file, that has non-committed changes.
   * In this case {@link UpToDateLineNumberProvider} will be used to transfer lines between local and annotated revisions.
   */
  @Nullable
  public abstract String getAnnotatedContent();

  /**
   * @return annotated revision
   * <p>
   * This information might be used to close annotations on local file if current revision was changed,
   * and invocation of AnnotationProvider on this file will produce different results - see {@link #isBaseRevisionChanged}.
   */
  @Nullable
  public abstract VcsRevisionNumber getCurrentRevision();

  /**
   * @param number current revision number {@link DiffProvider#getCurrentRevision}
   * @return whether annotations should be updated
   */
  public boolean isBaseRevisionChanged(@Nonnull VcsRevisionNumber number) {
    final VcsRevisionNumber currentRevision = getCurrentRevision();
    return currentRevision != null && !currentRevision.equals(number);
  }

  /**
   * This method is invoked when the file annotation is no longer used.
   * NB: method might be invoked multiple times
   */
  public abstract void dispose();

  /**
   * Get annotation aspects.
   * The typical aspects are revision number, date, author.
   * The aspects are displayed each in own column in the returned order.
   */
  @Nonnull
  public abstract LineAnnotationAspect[] getAspects();

  /**
   * @return number of lines in annotated content
   */
  public abstract int getLineCount();

  /**
   * The tooltip that is shown over annotation.
   * Typically, this is a detailed info about related revision. ex: long revision number, commit message
   */
  @Nullable
  public abstract String getToolTip(int lineNumber);

  /**
   * @return last revision that modified this line.
   */
  @Nullable
  public abstract VcsRevisionNumber getLineRevisionNumber(int lineNumber);

  /**
   * @return time of the last modification of this line.
   * Typically, this is a timestamp associated with {@link #getLineRevisionNumber}
   */
  @Nullable
  public abstract Date getLineDate(int lineNumber);

  /**
   * @return revisions that are mentioned in the annotations, from newest to oldest
   * Can be used to sort revisions, if they can't be sorted by {@code Date} or show file modification number for a revision.
   */
  @Nullable
  public abstract List<VcsFileRevision> getRevisions();

  /**
   * Allows to switch between different representation modes.
   * <p>
   * Ex: in SVN it's possible to show revision that modified line - "svn blame -g",
   * or the commit that merged that change into current branch - "svn blame".
   * <p>
   * when "show merge sources" is turned on, {@link #getLineRevisionNumber} returns merge source revision,
   * while {@link #originalRevision} returns merge revision.
   */
  @Nullable
  public AnnotationSourceSwitcher getAnnotationSourceSwitcher() {
    return null;
  }

  /**
   * @return last revision that modified this line in current branch.
   * @see #getAnnotationSourceSwitcher()
   * @see #getLineRevisionNumber(int)
   */
  @Nullable
  public VcsRevisionNumber originalRevision(int lineNumber) {
    return getLineRevisionNumber(lineNumber);
  }

  /**
   * Notify that annotations should be closed.
   * NOTE(review): assumes {@link #setCloser} has been called first — a null closer here
   * would throw NPE; confirm against the platform's usage contract.
   */
  public final void close() {
    myCloser.run();
  }

  /**
   * Notify that annotation information has changed, and should be updated.
   * If `this` is visible, hide it and show new one instead.
   * If `this` is not visible, do nothing.
   *
   * @param newFileAnnotation annotations to be shown
   */
  public final void reload(@Nonnull FileAnnotation newFileAnnotation) {
    if (myReloader != null) myReloader.consume(newFileAnnotation);
  }

  /**
   * @see #close()
   */
  public final void setCloser(@Nonnull Runnable closer) {
    myCloser = closer;
  }

  /**
   * @see #reload(FileAnnotation)
   */
  public final void setReloader(@Nullable Consumer<FileAnnotation> reloader) {
    myReloader = reloader;
  }

  @Deprecated
  public boolean revisionsNotEmpty() {
    return true;
  }

  @Nullable
  public CurrentFileRevisionProvider getCurrentFileRevisionProvider() {
    return createDefaultCurrentFileRevisionProvider(this);
  }

  @Nullable
  public PreviousFileRevisionProvider getPreviousFileRevisionProvider() {
    return createDefaultPreviousFileRevisionProvider(this);
  }

  @Nullable
  public AuthorsMappingProvider getAuthorsMappingProvider() {
    return createDefaultAuthorsMappingProvider(this);
  }

  @Nullable
  public RevisionsOrderProvider getRevisionsOrderProvider() {
    return createDefaultRevisionsOrderProvider(this);
  }

  /** Maps a line to the revision that last changed it. */
  public interface CurrentFileRevisionProvider {
    @Nullable
    VcsFileRevision getRevision(int lineNumber);
  }

  /** Maps a line to the revision preceding the one that last changed it. */
  public interface PreviousFileRevisionProvider {
    @Nullable
    VcsFileRevision getPreviousRevision(int lineNumber);

    @Nullable
    VcsFileRevision getLastRevision();
  }

  /** Maps revision numbers to author names. */
  public interface AuthorsMappingProvider {
    @Nonnull
    Map<VcsRevisionNumber, String> getAuthors();
  }

  /** Groups revisions in display/ordering buckets, newest first. */
  public interface RevisionsOrderProvider {
    @Nonnull
    List<List<VcsRevisionNumber>> getOrderedRevisions();
  }

  @Nonnull
  public static String formatDate(@Nonnull Date date) {
    return JBDateFormat.getFormatter("vcs.annotate").formatPrettyDate(date);
  }

  // Builds a per-line lookup by first indexing revisions by number, then resolving
  // each line's revision number once up front.
  @Nullable
  private static CurrentFileRevisionProvider createDefaultCurrentFileRevisionProvider(@Nonnull FileAnnotation annotation) {
    List<VcsFileRevision> revisions = annotation.getRevisions();
    if (revisions == null) return null;

    Map<VcsRevisionNumber, VcsFileRevision> map = new HashMap<>();
    for (VcsFileRevision revision : revisions) {
      map.put(revision.getRevisionNumber(), revision);
    }

    List<VcsFileRevision> lineToRevision = new ArrayList<>(annotation.getLineCount());
    for (int i = 0; i < annotation.getLineCount(); i++) {
      lineToRevision.add(map.get(annotation.getLineRevisionNumber(i)));
    }

    return (lineNumber) -> {
      LOG.assertTrue(lineNumber >= 0 && lineNumber < lineToRevision.size());
      return lineToRevision.get(lineNumber);
    };
  }

  // Relies on getRevisions() being ordered newest-to-oldest: the "previous" revision of
  // revisions[i] is revisions[i + 1].
  @Nullable
  private static PreviousFileRevisionProvider createDefaultPreviousFileRevisionProvider(@Nonnull FileAnnotation annotation) {
    List<VcsFileRevision> revisions = annotation.getRevisions();
    if (revisions == null) return null;

    Map<VcsRevisionNumber, VcsFileRevision> map = new HashMap<>();
    for (int i = 0; i < revisions.size(); i++) {
      VcsFileRevision revision = revisions.get(i);
      VcsFileRevision previousRevision = i + 1 < revisions.size() ? revisions.get(i + 1) : null;
      map.put(revision.getRevisionNumber(), previousRevision);
    }

    List<VcsFileRevision> lineToRevision = new ArrayList<>(annotation.getLineCount());
    for (int i = 0; i < annotation.getLineCount(); i++) {
      lineToRevision.add(map.get(annotation.getLineRevisionNumber(i)));
    }

    VcsFileRevision lastRevision = ContainerUtil.getFirstItem(revisions);

    return new PreviousFileRevisionProvider() {
      @Nullable
      @Override
      public VcsFileRevision getPreviousRevision(int lineNumber) {
        LOG.assertTrue(lineNumber >= 0 && lineNumber < lineToRevision.size());
        return lineToRevision.get(lineNumber);
      }

      @Nullable
      @Override
      public VcsFileRevision getLastRevision() {
        return lastRevision;
      }
    };
  }

  @Nullable
  private static AuthorsMappingProvider createDefaultAuthorsMappingProvider(@Nonnull FileAnnotation annotation) {
    List<VcsFileRevision> revisions = annotation.getRevisions();
    if (revisions == null) return null;

    Map<VcsRevisionNumber, String> authorsMapping = new HashMap<>();
    for (VcsFileRevision revision : revisions) {
      String author = revision.getAuthor();
      if (author != null) authorsMapping.put(revision.getRevisionNumber(), author);
    }

    return () -> authorsMapping;
  }

  @Nullable
  private static RevisionsOrderProvider createDefaultRevisionsOrderProvider(@Nonnull FileAnnotation annotation) {
    List<VcsFileRevision> revisions = annotation.getRevisions();
    if (revisions == null) return null;

    List<List<VcsRevisionNumber>> orderedRevisions = ContainerUtil.map(revisions, (revision) -> {
      return Collections.singletonList(revision.getRevisionNumber());
    });

    return () -> orderedRevisions;
  }
}
package com.devopsbuddy.web.controllers;

import com.devopsbuddy.backend.persistence.domain.backend.Plan;
import com.devopsbuddy.backend.persistence.domain.backend.Role;
import com.devopsbuddy.backend.persistence.domain.backend.User;
import com.devopsbuddy.backend.persistence.domain.backend.UserRole;
import com.devopsbuddy.backend.service.PlanService;
import com.devopsbuddy.backend.service.S3Service;
import com.devopsbuddy.backend.service.StripeService;
import com.devopsbuddy.backend.service.UserService;
import com.devopsbuddy.enums.PlansEnum;
import com.devopsbuddy.enums.RolesEnum;
import com.devopsbuddy.enums.SignUpEnum;
import com.devopsbuddy.exceptions.S3Exception;
import com.devopsbuddy.exceptions.StripeException;
import com.devopsbuddy.utils.StripeUtils;
import com.devopsbuddy.utils.UserUtils;
import com.devopsbuddy.web.domain.frontend.BasicAccountPayload;
import com.devopsbuddy.web.domain.frontend.ProAccountPayload;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.ModelAndView;

import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import java.io.IOException;
import java.time.Clock;
import java.time.LocalDate;
import java.util.*;

/**
 * Sign Up Controller to control the creation of user information and credentials
 *
 * Created by root on 15/06/17.
 */
@Controller
public class SignUpController {

    /** The application logger */
    private static final Logger LOG = LoggerFactory.getLogger(SignUpController.class);

    // NOTE(review): the private GENERIC_ERROR_VIEW_NAME constant was removed — it was
    // unused; the code resolves the view name via SignUpEnum.GENERIC_ERROR_VIEW_NAME.

    // Spring instantiates the object through DI
    @Autowired
    private UserService userService;

    // Spring instantiates the object through DI
    @Autowired
    private PlanService planService;

    // Spring instantiates the object through DI
    @Autowired
    private S3Service s3Service;

    // Spring instantiates the object through DI
    @Autowired
    private StripeService stripeService;

    public static final String SIGNUP_URL_MAPPING = "/signup";
    public static final String PAYLOAD_MODEL_KEY_NAME = "payload";

    /**
     * Renders the sign-up form for a valid plan id.
     *
     * @param planId the plan id chosen by the user (must be Basic or Pro)
     * @param model  the Spring model backing the view
     * @return the subscription view name
     * @throws IllegalArgumentException if the plan id is neither Basic nor Pro
     */
    @RequestMapping(value = SIGNUP_URL_MAPPING, method = RequestMethod.GET)
    public String signUpGet(@RequestParam("planId") int planId, ModelMap model) {

        if ((planId != PlansEnum.BASIC.getId()) && (planId != PlansEnum.PRO.getId())) {
            throw new IllegalArgumentException("Plan is not valid");
        }

        model.addAttribute(SignUpEnum.PAYLOAD_MODEL_KEY_NAME.getValue(), new ProAccountPayload());
        return SignUpEnum.SUBSCRIPTION_VIEW_NAME.getValue();
    }

    /**
     * - Method invoked by Sign Up button on the form HTML page -
     *
     * It creates an user based on the information inserted by the use on the HTML page
     * and saves it in the database
     *
     * @param planId The plan id chosen by the user, if it's Basic or Pro
     * @param file The profile image file uploaded by the user to Amazon S3 Cloud bucket
     * @param payload The front end pojo in which the user inserted his data
     * @param model The Spring Model that manipulates data objects in the screen
     * @return The Subscription view name with the message to the user if was successful or not
     * @throws IOException The exception if an error occurred while saving the image file on Amazon S3
     */
    @RequestMapping(value = SIGNUP_URL_MAPPING, method = RequestMethod.POST)
    public String signUpPost(@RequestParam(name = "planId", required = true) int planId,
                             @RequestParam(name = "file", required = false) MultipartFile file,
                             @ModelAttribute(PAYLOAD_MODEL_KEY_NAME) @Valid ProAccountPayload payload,
                             ModelMap model) throws IOException {

        // Verifies if the plan exists according to the plan id passed as parameter
        if ((planId != PlansEnum.BASIC.getId()) && (planId != PlansEnum.PRO.getId())) {
            model.addAttribute(SignUpEnum.SIGNED_UP_MESSAGE_KEY.getValue(), "false");
            model.addAttribute(SignUpEnum.ERROR_MESSAGE_KEY.getValue(), "Plan doesn't exist");
            return SignUpEnum.SUBSCRIPTION_VIEW_NAME.getValue();
        }

        // Checks if there is already an user object by email or username conditions
        this.checkForDuplicates(payload, model);

        // Variable to set true or false if there is a duplicated username or email
        boolean duplicates = false;

        // List of error messages to be shown to the user on the bootstrap alert on the HTML page.
        // A list is necessary because we check if the username and the email are duplicated
        List<String> errorMessages = new ArrayList<>();

        if (model.containsKey(SignUpEnum.DUPLICATED_USERNAME_KEY.getValue())) {
            LOG.warn("The username already exists. Displaying error to the user");
            model.addAttribute(SignUpEnum.SIGNED_UP_MESSAGE_KEY.getValue(), "false");
            errorMessages.add("Username already exists");
            duplicates = true;
        }

        if (model.containsKey(SignUpEnum.DUPLICATED_EMAIL_KEY.getValue())) {
            LOG.warn("The email already exists. Displaying error to the user");
            model.addAttribute(SignUpEnum.SIGNED_UP_MESSAGE_KEY.getValue(), "false");
            errorMessages.add("Email already exists");
            duplicates = true;
        }

        // Check if the duplicated flag was set to true or is kept in false
        if (duplicates) {
            model.addAttribute(SignUpEnum.ERROR_MESSAGE_KEY.getValue(), errorMessages);
            return SignUpEnum.SUBSCRIPTION_VIEW_NAME.getValue();
        }

        // There are certain info that the user doesn't set, such as profile image URL, Stripe
        // customer id, plans and roles
        LOG.debug("Transforming user payload into user domain object");
        User user = UserUtils.fromWebUserToDomainUser(payload);

        // Stores the profile image on Amazon S3 and stores the URL in the user's record
        if ((file != null) && (!file.isEmpty())) {
            String profileImageUrl = s3Service.storeProfileImage(file, payload.getUsername());
            if (profileImageUrl != null) {
                user.setProfileImageUrl(profileImageUrl);
            } else {
                LOG.warn("There was a problem uploading the profile image to S3. The user's profile will be created without the image.");
            }
        }

        // Sets the plan and the roles (depending on the chosen plan)
        LOG.debug("Retrieving plan from the database");
        Plan selectedPlan = planService.findPlanById(planId);
        if (selectedPlan == null) {
            LOG.error("The plan id {} could not be found. Throwing exception.", planId);
            model.addAttribute(SignUpEnum.SIGNED_UP_MESSAGE_KEY.getValue(), "false");
            model.addAttribute(SignUpEnum.ERROR_MESSAGE_KEY.getValue(), "Plan id not found");
            return SignUpEnum.SUBSCRIPTION_VIEW_NAME.getValue();
        }
        user.setPlan(selectedPlan);

        User registeredUser = null;

        // By default users get BASIC ROLE
        Set<UserRole> userRoles = new HashSet<>();
        if (planId == PlansEnum.BASIC.getId()) {
            userRoles.add(new UserRole(user, new Role(RolesEnum.BASIC)));
            registeredUser = userService.createUser(user, PlansEnum.BASIC, userRoles);
        } else {
            // If the user choose Pro plan, it gets the credit card info to process the purchase
            userRoles.add(new UserRole(user, new Role(RolesEnum.PRO)));

            // Extra precaution in case the POST method is invoked programmatically
            if (StringUtils.isEmpty(payload.getCardCode()) ||
                    StringUtils.isEmpty(payload.getCardNumber()) ||
                    StringUtils.isEmpty(payload.getCardMonth()) ||
                    StringUtils.isEmpty(payload.getCardYear())) {
                LOG.error("One or more credit card fields is null or empty. Returning error to the user.");
                model.addAttribute(SignUpEnum.SIGNED_UP_MESSAGE_KEY.getValue(), "false");
                model.addAttribute(SignUpEnum.ERROR_MESSAGE_KEY.getValue(),
                        "One or more credit card fields is null or empty");
                return SignUpEnum.SUBSCRIPTION_VIEW_NAME.getValue();
            }

            // If the user has selected Pro Account, creates the Stripe customer to store
            // the Stripe customer id in the database
            Map<String, Object> stripeTokenParams = StripeUtils.extractTokenParamsFromSignUpPayload(payload);

            Map<String, Object> customerParams = new HashMap<>();
            customerParams.put("description", "DevOps Buddy Customer. Username: " + payload.getUsername());
            customerParams.put("email", payload.getEmail());
            // NOTE(review): the customer is NOT subscribed to a Stripe plan here (no "plan"
            // key is set on customerParams) — confirm this is intended.
            LOG.info("Subscribing the customer to plan {}", selectedPlan.getName());

            String stripeCustomerId = stripeService.createCustomer(stripeTokenParams, customerParams);
            LOG.info("Username: {} has been subscribed to Stripe", payload.getUsername());

            user.setStripeCustomerId(stripeCustomerId);
            registeredUser = userService.createUser(user, PlansEnum.PRO, userRoles);
            LOG.debug(payload.toString());
        }

        // Auto logins the registered user after subscribe him
        Authentication auth = new UsernamePasswordAuthenticationToken(
                registeredUser, null, registeredUser.getAuthorities());
        SecurityContextHolder.getContext().setAuthentication(auth);

        LOG.info("User created successfully");

        // Set the message key to show the correct bootstrap alert message on the screen
        model.addAttribute(SignUpEnum.SIGNED_UP_MESSAGE_KEY.getValue(), "true");

        return SignUpEnum.SUBSCRIPTION_VIEW_NAME.getValue();
    }

    /**
     * - Invoked by signUpPost method -
     *
     * Checks if the username/email are duplicates and sets error flags in the model.
     * Side effect: the method might set attributes on model
     *
     * @param payload the front-end payload carrying the candidate username and email
     * @param model   the Spring model on which duplicate flags are set
     */
    private void checkForDuplicates(BasicAccountPayload payload, ModelMap model) {

        // Username
        if (userService.findByUserName(payload.getUsername()) != null) {
            // Add the 'true' value to the attribute of the model
            model.addAttribute(SignUpEnum.DUPLICATED_USERNAME_KEY.getValue(), "true");
        }
        if (userService.findUserByEmail(payload.getEmail()) != null) {
            // Add the 'true' value to the attribute of the model
            model.addAttribute(SignUpEnum.DUPLICATED_EMAIL_KEY.getValue(), "true");
        }
    }

    /**
     * Using the Spring MVC Exception Handling to handle the StripeException and S3Exception classes
     * because both exceptions are only on the journey of sign up users to the database via the
     * Spring Controller.
     *
     * It handles the exceptions created to the Stripe service and the Amazon S3 Cloud service
     *
     * @param request The HttpServletRequest object for manipulate the view
     * @param exception The exception that Spring will set automatically if it occurs
     * @return the ModelAndView object(combination of a ModelMap to a View name) with the data
     * filled to be shown on generalError HTML page
     */
    @ExceptionHandler({StripeException.class, S3Exception.class})
    public ModelAndView signUpException(HttpServletRequest request, Exception exception) {

        LOG.error("Request {} raised exception {}", request.getRequestURL(), exception);

        ModelAndView mav = new ModelAndView();
        mav.addObject("exception", exception);
        mav.addObject("url", request.getRequestURL());
        mav.addObject("timestamp", LocalDate.now(Clock.systemUTC()));
        mav.setViewName(SignUpEnum.GENERIC_ERROR_VIEW_NAME.getValue());
        return mav;
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis; import static com.google.common.collect.Iterables.concat; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Predicate; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; import com.google.common.eventbus.EventBus; import com.google.devtools.build.lib.actions.ActionAnalysisMetadata; import com.google.devtools.build.lib.actions.ActionGraph; import com.google.devtools.build.lib.actions.ActionLookupValue; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ArtifactFactory; import com.google.devtools.build.lib.actions.ArtifactOwner; import com.google.devtools.build.lib.actions.Root; import com.google.devtools.build.lib.analysis.DependencyResolver.InconsistentAspectOrderException; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.analysis.config.BuildConfigurationCollection; import 
com.google.devtools.build.lib.analysis.config.BuildOptions; import com.google.devtools.build.lib.analysis.config.ComposingSplitTransition; import com.google.devtools.build.lib.analysis.config.ConfigMatchingProvider; import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible; import com.google.devtools.build.lib.events.Event; import com.google.devtools.build.lib.events.ExtendedEventHandler; import com.google.devtools.build.lib.events.StoredEventHandler; import com.google.devtools.build.lib.packages.AspectClass; import com.google.devtools.build.lib.packages.AspectDescriptor; import com.google.devtools.build.lib.packages.AspectParameters; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.Attribute.ConfigurationTransition; import com.google.devtools.build.lib.packages.Attribute.Transition; import com.google.devtools.build.lib.packages.BuildType; import com.google.devtools.build.lib.packages.NativeAspectClass; import com.google.devtools.build.lib.packages.NoSuchPackageException; import com.google.devtools.build.lib.packages.NoSuchTargetException; import com.google.devtools.build.lib.packages.NoSuchThingException; import com.google.devtools.build.lib.packages.PackageSpecification; import com.google.devtools.build.lib.packages.RawAttributeMapper; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.RuleTransitionFactory; import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.packages.TargetUtils; import 
com.google.devtools.build.lib.pkgcache.LoadingResult; import com.google.devtools.build.lib.rules.test.CoverageReportActionFactory; import com.google.devtools.build.lib.rules.test.CoverageReportActionFactory.CoverageReportActionsWrapper; import com.google.devtools.build.lib.rules.test.InstrumentedFilesProvider; import com.google.devtools.build.lib.skyframe.AspectValue; import com.google.devtools.build.lib.skyframe.AspectValue.AspectKey; import com.google.devtools.build.lib.skyframe.AspectValue.AspectValueKey; import com.google.devtools.build.lib.skyframe.ConfiguredTargetKey; import com.google.devtools.build.lib.skyframe.ConfiguredTargetValue; import com.google.devtools.build.lib.skyframe.CoverageReportValue; import com.google.devtools.build.lib.skyframe.SkyframeAnalysisResult; import com.google.devtools.build.lib.skyframe.SkyframeBuildView; import com.google.devtools.build.lib.skyframe.SkyframeExecutor; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.SkylarkImport; import com.google.devtools.build.lib.syntax.SkylarkImports; import com.google.devtools.build.lib.syntax.SkylarkImports.SkylarkImportSyntaxException; import com.google.devtools.build.lib.util.OrderedSetMultimap; import com.google.devtools.build.lib.util.Preconditions; import com.google.devtools.build.lib.util.RegexFilter; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.WalkableGraph; import com.google.devtools.common.options.Converter; import com.google.devtools.common.options.Option; import com.google.devtools.common.options.OptionsBase; import com.google.devtools.common.options.OptionsParser.OptionUsageRestrictions; import com.google.devtools.common.options.OptionsParsingException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import 
java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Logger; import javax.annotation.Nullable; /** * <p>The BuildView presents a semantically-consistent and transitively-closed * dependency graph for some set of packages. * * <h2>Package design</h2> * * <p>This package contains the Blaze dependency analysis framework (aka * "analysis phase"). The goal of this code is to perform semantic analysis of * all of the build targets required for a given build, to report * errors/warnings for any problems in the input, and to construct an "action * graph" (see {@code lib.actions} package) correctly representing the work to * be done during the execution phase of the build. * * <p><b>Configurations</b> the inputs to a build come from two sources: the * intrinsic inputs, specified in the BUILD file, are called <em>targets</em>. * The environmental inputs, coming from the build tool, the command-line, or * configuration files, are called the <em>configuration</em>. Only when a * target and a configuration are combined is there sufficient information to * perform a build. </p> * * <p>Targets are implemented by the {@link Target} hierarchy in the {@code * lib.packages} code. Configurations are implemented by {@link * BuildConfiguration}. The pair of these together is represented by an * instance of class {@link ConfiguredTarget}; this is the root of a hierarchy * with different implementations for each kind of target: source file, derived * file, rules, etc. * * <p>The framework code in this package (as opposed to its subpackages) is * responsible for constructing the {@code ConfiguredTarget} graph for a given * target and configuration, taking care of such issues as: * <ul> * <li>caching common subgraphs. * <li>detecting and reporting cycles. * <li>correct propagation of errors through the graph. 
* <li>reporting universal errors, such as dependencies from production code * to tests, or to experimental branches. * <li>capturing and replaying errors. * <li>maintaining the graph from one build to the next to * avoid unnecessary recomputation. * <li>checking software licenses. * </ul> * * <p>See also {@link ConfiguredTarget} which documents some important * invariants. */ public class BuildView { /** * Options that affect the <i>mechanism</i> of analysis. These are distinct from {@link * com.google.devtools.build.lib.analysis.config.BuildOptions}, which affect the <i>value</i> of a * BuildConfiguration. */ public static class Options extends OptionsBase { @Option( name = "loading_phase_threads", defaultValue = "-1", category = "what", converter = LoadingPhaseThreadCountConverter.class, help = "Number of parallel threads to use for the loading/analysis phase." ) public int loadingPhaseThreads; @Option( name = "keep_going", abbrev = 'k', defaultValue = "false", category = "strategy", help = "Continue as much as possible after an error. While the target that failed, and those " + "that depend on it, cannot be analyzed (or built), the other prerequisites of " + "these targets can be analyzed (or built) all the same." ) public boolean keepGoing; @Option( name = "analysis_warnings_as_errors", deprecationWarning = "analysis_warnings_as_errors is now a no-op and will be removed in" + " an upcoming Blaze release", defaultValue = "false", category = "strategy", help = "Treat visible analysis warnings as errors." ) public boolean analysisWarningsAsErrors; @Option( name = "discard_analysis_cache", defaultValue = "false", category = "strategy", help = "Discard the analysis cache immediately after the analysis phase completes." + " Reduces memory usage by ~10%, but makes further incremental builds slower." 
) public boolean discardAnalysisCache; @Option( name = "experimental_extra_action_filter", defaultValue = "", category = "experimental", converter = RegexFilter.RegexFilterConverter.class, help = "Filters set of targets to schedule extra_actions for." ) public RegexFilter extraActionFilter; @Option( name = "experimental_extra_action_top_level_only", defaultValue = "false", category = "experimental", help = "Only schedules extra_actions for top level targets." ) public boolean extraActionTopLevelOnly; @Option( name = "experimental_extra_action_top_level_only_with_aspects", defaultValue = "true", category = "experimental", help = "If true and --experimental_extra_action_top_level_only=true, will include actions " + "from aspects injected by top-level rules. " + "This is an escape hatch in case commit df9e5e16c370391098c4432779ad4d1c9dd693ca " + "breaks something." ) public boolean extraActionTopLevelOnlyWithAspects; @Option( name = "version_window_for_dirty_node_gc", defaultValue = "0", optionUsageRestrictions = OptionUsageRestrictions.UNDOCUMENTED, help = "Nodes that have been dirty for more than this many versions will be deleted" + " from the graph upon the next update. Values must be non-negative long integers," + " or -1 indicating the maximum possible window." ) public long versionWindowForDirtyNodeGc; @Deprecated @Option( name = "experimental_interleave_loading_and_analysis", defaultValue = "true", category = "experimental", help = "No-op." ) public boolean interleaveLoadingAndAnalysis; } private static Logger LOG = Logger.getLogger(BuildView.class.getName()); private final BlazeDirectories directories; private final SkyframeExecutor skyframeExecutor; private final SkyframeBuildView skyframeBuildView; private final ConfiguredRuleClassProvider ruleClassProvider; /** * A factory class to create the coverage report action. May be null. 
*/ @Nullable private final CoverageReportActionFactory coverageReportActionFactory; @VisibleForTesting public Set<SkyKey> getSkyframeEvaluatedTargetKeysForTesting() { return skyframeBuildView.getEvaluatedTargetKeys(); } /** The number of targets freshly evaluated in the last analysis run. */ public int getTargetsVisited() { return skyframeBuildView.getEvaluatedTargetKeys().size(); } public BuildView(BlazeDirectories directories, ConfiguredRuleClassProvider ruleClassProvider, SkyframeExecutor skyframeExecutor, CoverageReportActionFactory coverageReportActionFactory) { this.directories = directories; this.coverageReportActionFactory = coverageReportActionFactory; this.ruleClassProvider = ruleClassProvider; this.skyframeExecutor = Preconditions.checkNotNull(skyframeExecutor); this.skyframeBuildView = skyframeExecutor.getSkyframeBuildView(); } /** * Returns whether the given configured target has errors. */ @VisibleForTesting public boolean hasErrors(ConfiguredTarget configuredTarget) { return configuredTarget == null; } /** * Sets the configurations. Not thread-safe. DO NOT CALL except from tests! */ @VisibleForTesting public void setConfigurationsForTesting(BuildConfigurationCollection configurations) { skyframeBuildView.setConfigurations(configurations); } public ArtifactFactory getArtifactFactory() { return skyframeBuildView.getArtifactFactory(); } @VisibleForTesting WorkspaceStatusAction getLastWorkspaceBuildInfoActionForTesting() { return skyframeExecutor.getLastWorkspaceStatusActionForTesting(); } @Override public int hashCode() { throw new UnsupportedOperationException(); // avoid nondeterminism } /** * Return value for {@link BuildView#update} and {@code BuildTool.prepareToBuild}. 
*/
  public static final class AnalysisResult {
    private final ImmutableList<ConfiguredTarget> targetsToBuild;
    @Nullable private final ImmutableList<ConfiguredTarget> targetsToTest;
    @Nullable private final String error;
    private final ActionGraph actionGraph;
    private final ImmutableSet<Artifact> artifactsToBuild;
    private final ImmutableSet<ConfiguredTarget> parallelTests;
    private final ImmutableSet<ConfiguredTarget> exclusiveTests;
    @Nullable private final TopLevelArtifactContext topLevelContext;
    private final ImmutableList<AspectValue> aspects;
    private final ImmutableMap<PackageIdentifier, Path> packageRoots;
    private final String workspaceName;

    private AnalysisResult(
        Collection<ConfiguredTarget> targetsToBuild,
        Collection<AspectValue> aspects,
        Collection<ConfiguredTarget> targetsToTest,
        @Nullable String error,
        ActionGraph actionGraph,
        Collection<Artifact> artifactsToBuild,
        Collection<ConfiguredTarget> parallelTests,
        Collection<ConfiguredTarget> exclusiveTests,
        TopLevelArtifactContext topLevelContext,
        ImmutableMap<PackageIdentifier, Path> packageRoots,
        String workspaceName) {
      this.targetsToBuild = ImmutableList.copyOf(targetsToBuild);
      this.aspects = ImmutableList.copyOf(aspects);
      // targetsToTest stays null when testing was not requested, so callers can distinguish
      // "no tests requested" from "no tests found".
      this.targetsToTest = targetsToTest == null ? null : ImmutableList.copyOf(targetsToTest);
      this.error = error;
      this.actionGraph = actionGraph;
      this.artifactsToBuild = ImmutableSet.copyOf(artifactsToBuild);
      this.parallelTests = ImmutableSet.copyOf(parallelTests);
      this.exclusiveTests = ImmutableSet.copyOf(exclusiveTests);
      this.topLevelContext = topLevelContext;
      this.packageRoots = packageRoots;
      this.workspaceName = workspaceName;
    }

    /**
     * Returns configured targets to build.
     */
    public Collection<ConfiguredTarget> getTargetsToBuild() {
      return targetsToBuild;
    }

    /**
     * The map from package names to the package root where each package was found; this is used to
     * set up the symlink tree.
     */
    public ImmutableMap<PackageIdentifier, Path> getPackageRoots() {
      return packageRoots;
    }

    /**
     * Returns aspects of configured targets to build.
     *
     * <p>If this list is empty, build the targets returned by {@code getTargetsToBuild()}.
     * Otherwise, only build these aspects of the targets returned by {@code getTargetsToBuild()}.
     */
    public Collection<AspectValue> getAspects() {
      return aspects;
    }

    /**
     * Returns the configured targets to run as tests, or {@code null} if testing was not
     * requested (e.g. "build" command rather than "test" command).
     */
    @Nullable
    public Collection<ConfiguredTarget> getTargetsToTest() {
      return targetsToTest;
    }

    /** Artifacts to build in addition to the targets' own outputs (e.g. build-info, coverage). */
    public ImmutableSet<Artifact> getAdditionalArtifactsToBuild() {
      return artifactsToBuild;
    }

    /** Tests that must be run serially (exclusively). */
    public ImmutableSet<ConfiguredTarget> getExclusiveTests() {
      return exclusiveTests;
    }

    /** Tests that may run concurrently with each other. */
    public ImmutableSet<ConfiguredTarget> getParallelTests() {
      return parallelTests;
    }

    /**
     * Returns an error description (if any).
     */
    @Nullable
    public String getError() {
      return error;
    }

    public boolean hasError() {
      return error != null;
    }

    /**
     * Returns the action graph.
     */
    public ActionGraph getActionGraph() {
      return actionGraph;
    }

    public TopLevelArtifactContext getTopLevelContext() {
      return topLevelContext;
    }

    public String getWorkspaceName() {
      return workspaceName;
    }
  }

  /**
   * Returns the collection of configured targets corresponding to any of the provided targets.
   */
  @VisibleForTesting
  static Iterable<? extends ConfiguredTarget> filterTestsByTargets(
      Collection<? extends ConfiguredTarget> targets, final Set<?
extends Target> allowedTargets) {
    return Iterables.filter(
        targets,
        new Predicate<ConfiguredTarget>() {
          @Override
          public boolean apply(ConfiguredTarget rule) {
            return allowedTargets.contains(rule.getTarget());
          }
        });
  }

  /**
   * Runs the analysis phase for the given loading result: associates each top-level target with
   * its configuration(s), builds the requested aspect keys, and configures the targets via
   * Skyframe.
   */
  @ThreadCompatible
  public AnalysisResult update(
      LoadingResult loadingResult,
      BuildConfigurationCollection configurations,
      List<String> aspects,
      Options viewOptions,
      TopLevelArtifactContext topLevelOptions,
      ExtendedEventHandler eventHandler,
      EventBus eventBus)
      throws ViewCreationFailedException, InterruptedException {
    LOG.info("Starting analysis");
    pollInterruptedStatus();

    skyframeBuildView.resetEvaluatedConfiguredTargetKeysSet();

    Collection<Target> targets = loadingResult.getTargets();
    eventBus.post(new AnalysisPhaseStartedEvent(targets));

    skyframeBuildView.setConfigurations(configurations);

    // Determine the configurations.
    List<TargetAndConfiguration> topLevelTargetsWithConfigs =
        nodesForTopLevelTargets(configurations, targets, eventHandler);

    // Report the generated association of targets to configurations
    Multimap<Label, BuildConfiguration> byLabel =
        ArrayListMultimap.<Label, BuildConfiguration>create();
    for (TargetAndConfiguration pair : topLevelTargetsWithConfigs) {
      byLabel.put(pair.getLabel(), pair.getConfiguration());
    }
    for (Label label : byLabel.keySet()) {
      eventBus.post(new TargetConfiguredEvent(label, byLabel.get(label)));
    }

    List<ConfiguredTargetKey> topLevelCtKeys = Lists.transform(topLevelTargetsWithConfigs,
        new Function<TargetAndConfiguration, ConfiguredTargetKey>() {
          @Override
          public ConfiguredTargetKey apply(TargetAndConfiguration node) {
            return new ConfiguredTargetKey(node.getLabel(), node.getConfiguration());
          }
        });

    List<AspectValueKey> aspectKeys = new ArrayList<>();
    for (String aspect : aspects) {
      // Syntax: label%aspect
      int delimiterPosition = aspect.indexOf('%');
      if (delimiterPosition >= 0) {
        // The aspect was specified as a Skylark .bzl file plus function name.
        // TODO(jfield): For consistency with Skylark loads, the aspect should be specified
        // as an absolute path. Also, we probably need to do at least basic validation of
        // path well-formedness here.
        String bzlFileLoadLikeString = aspect.substring(0, delimiterPosition);
        if (!bzlFileLoadLikeString.startsWith("//") && !bzlFileLoadLikeString.startsWith("@")) {
          // "Legacy" behavior of '--aspects' parameter.
          bzlFileLoadLikeString = PathFragment.create("/" + bzlFileLoadLikeString).toString();
          if (bzlFileLoadLikeString.endsWith(".bzl")) {
            bzlFileLoadLikeString = bzlFileLoadLikeString.substring(0,
                bzlFileLoadLikeString.length() - ".bzl".length());
          }
        }
        SkylarkImport skylarkImport;
        try {
          skylarkImport = SkylarkImports.create(bzlFileLoadLikeString);
        } catch (SkylarkImportSyntaxException e) {
          throw new ViewCreationFailedException(
              String.format("Invalid aspect '%s': %s", aspect, e.getMessage()), e);
        }

        String skylarkFunctionName = aspect.substring(delimiterPosition + 1);
        for (TargetAndConfiguration targetSpec : topLevelTargetsWithConfigs) {
          aspectKeys.add(
              AspectValue.createSkylarkAspectKey(
                  targetSpec.getLabel(),
                  // For invoking top-level aspects, use the top-level configuration for both the
                  // aspect and the base target while the top-level configuration is untrimmed.
                  targetSpec.getConfiguration(),
                  targetSpec.getConfiguration(),
                  skylarkImport,
                  skylarkFunctionName));
        }
      } else {
        // Plain aspect name: resolve against the native aspects known to the rule class provider.
        final NativeAspectClass aspectFactoryClass =
            ruleClassProvider.getNativeAspectClassMap().get(aspect);
        if (aspectFactoryClass != null) {
          for (TargetAndConfiguration targetSpec : topLevelTargetsWithConfigs) {
            // For invoking top-level aspects, use the top-level configuration for both the
            // aspect and the base target while the top-level configuration is untrimmed.
BuildConfiguration configuration = targetSpec.getConfiguration();
            aspectKeys.add(
                AspectValue.createAspectKey(
                    targetSpec.getLabel(),
                    configuration,
                    new AspectDescriptor(aspectFactoryClass, AspectParameters.EMPTY),
                    configuration
                ));
          }
        } else {
          throw new ViewCreationFailedException("Aspect '" + aspect + "' is unknown");
        }
      }
    }

    skyframeExecutor.injectWorkspaceStatusData(loadingResult.getWorkspaceName());
    SkyframeAnalysisResult skyframeAnalysisResult;
    try {
      skyframeAnalysisResult =
          skyframeBuildView.configureTargets(
              eventHandler, topLevelCtKeys, aspectKeys, eventBus, viewOptions.keepGoing,
              viewOptions.loadingPhaseThreads);
      setArtifactRoots(skyframeAnalysisResult.getPackageRoots());
    } finally {
      // Invalidation bookkeeping must be cleared even when configuration fails.
      skyframeBuildView.clearInvalidatedConfiguredTargets();
    }

    int numTargetsToAnalyze = topLevelTargetsWithConfigs.size();
    int numSuccessful = skyframeAnalysisResult.getConfiguredTargets().size();
    if (0 < numSuccessful && numSuccessful < numTargetsToAnalyze) {
      // Partial success (only possible with --keep_going): tell the user what was analyzed.
      String msg = String.format("Analysis succeeded for only %d of %d top-level targets",
          numSuccessful, numTargetsToAnalyze);
      eventHandler.handle(Event.info(msg));
      LOG.info(msg);
    }

    AnalysisResult result = createResult(
        eventHandler, loadingResult, topLevelOptions, viewOptions, skyframeAnalysisResult);
    LOG.info("Finished analysis");
    return result;
  }

  /** Assembles the {@link AnalysisResult} from the Skyframe analysis output. */
  private AnalysisResult createResult(
      ExtendedEventHandler eventHandler,
      LoadingResult loadingResult,
      TopLevelArtifactContext topLevelOptions,
      BuildView.Options viewOptions,
      SkyframeAnalysisResult skyframeAnalysisResult)
      throws InterruptedException {
    Collection<Target> testsToRun = loadingResult.getTestsToRun();
    Collection<ConfiguredTarget> configuredTargets = skyframeAnalysisResult.getConfiguredTargets();
    Collection<AspectValue> aspects = skyframeAnalysisResult.getAspects();

    Collection<ConfiguredTarget> allTargetsToTest = null;
    if (testsToRun != null) {
      // Determine the subset of configured targets that are meant to be run as tests.
      // Do not remove <ConfiguredTarget>: workaround for Java 7 type inference.
      allTargetsToTest = Lists.<ConfiguredTarget>newArrayList(
          filterTestsByTargets(configuredTargets, Sets.newHashSet(testsToRun)));
    }

    Set<Artifact> artifactsToBuild = new HashSet<>();
    Set<ConfiguredTarget> parallelTests = new HashSet<>();
    Set<ConfiguredTarget> exclusiveTests = new HashSet<>();

    // build-info and build-changelist.
    Collection<Artifact> buildInfoArtifacts =
        skyframeExecutor.getWorkspaceStatusArtifacts(eventHandler);
    Preconditions.checkState(buildInfoArtifacts.size() == 2, buildInfoArtifacts);
    artifactsToBuild.addAll(buildInfoArtifacts);

    // Extra actions
    addExtraActionsIfRequested(viewOptions, configuredTargets, aspects, artifactsToBuild);

    // Coverage
    NestedSet<Artifact> baselineCoverageArtifacts = getBaselineCoverageArtifacts(configuredTargets);
    Iterables.addAll(artifactsToBuild, baselineCoverageArtifacts);
    if (coverageReportActionFactory != null) {
      CoverageReportActionsWrapper actionsWrapper;
      actionsWrapper = coverageReportActionFactory.createCoverageReportActionsWrapper(
          eventHandler,
          directories,
          allTargetsToTest,
          baselineCoverageArtifacts,
          getArtifactFactory(),
          CoverageReportValue.ARTIFACT_OWNER);
      if (actionsWrapper != null) {
        ImmutableList<ActionAnalysisMetadata> actions = actionsWrapper.getActions();
        skyframeExecutor.injectCoverageReportData(actions);
        artifactsToBuild.addAll(actionsWrapper.getCoverageOutputs());
      }
    }

    // Tests. This must come last, so that the exclusive tests are scheduled after everything else.
    scheduleTestsIfRequested(parallelTests, exclusiveTests, topLevelOptions, allTargetsToTest);

    String error = createErrorMessage(loadingResult, skyframeAnalysisResult);

    final WalkableGraph graph = skyframeAnalysisResult.getWalkableGraph();
    final ActionGraph actionGraph =
        new ActionGraph() {
          @Nullable
          @Override
          public ActionAnalysisMetadata getGeneratingAction(Artifact artifact) {
            ArtifactOwner artifactOwner = artifact.getArtifactOwner();
            if (artifactOwner instanceof ActionLookupValue.ActionLookupKey) {
              SkyKey key = ActionLookupValue.key((ActionLookupValue.ActionLookupKey) artifactOwner);
              ActionLookupValue val;
              try {
                val = (ActionLookupValue) graph.getValue(key);
              } catch (InterruptedException e) {
                throw new IllegalStateException(
                    "Interruption not expected from this graph: " + key, e);
              }
              return val == null ? null : val.getGeneratingActionDangerousReadJavadoc(artifact);
            }
            return null;
          }
        };
    return new AnalysisResult(
        configuredTargets,
        aspects,
        allTargetsToTest,
        error,
        actionGraph,
        artifactsToBuild,
        parallelTests,
        exclusiveTests,
        topLevelOptions,
        skyframeAnalysisResult.getPackageRoots(),
        loadingResult.getWorkspaceName());
  }

  /** Builds a human-readable error summary for the loading/analysis phases, or null on success. */
  @Nullable
  public static String createErrorMessage(
      LoadingResult loadingResult, @Nullable SkyframeAnalysisResult skyframeAnalysisResult) {
    return loadingResult.hasTargetPatternError()
        ? "command succeeded, but there were errors parsing the target pattern"
        : loadingResult.hasLoadingError()
                || (skyframeAnalysisResult != null && skyframeAnalysisResult.hasLoadingError())
            ? "command succeeded, but there were loading phase errors"
            : (skyframeAnalysisResult != null && skyframeAnalysisResult.hasAnalysisError())
                ?
"command succeeded, but not all targets were analyzed"
                : null;
  }

  /** Collects transitive baseline coverage artifacts from all instrumented configured targets. */
  private static NestedSet<Artifact> getBaselineCoverageArtifacts(
      Collection<ConfiguredTarget> configuredTargets) {
    NestedSetBuilder<Artifact> baselineCoverageArtifacts = NestedSetBuilder.stableOrder();
    for (ConfiguredTarget target : configuredTargets) {
      InstrumentedFilesProvider provider = target.getProvider(InstrumentedFilesProvider.class);
      if (provider != null) {
        baselineCoverageArtifacts.addTransitive(provider.getBaselineCoverageArtifacts());
      }
    }
    return baselineCoverageArtifacts.build();
  }

  /**
   * Adds the extra-action artifacts from targets and aspects that pass the
   * --experimental_extra_action_filter to the set of artifacts to build.
   */
  private void addExtraActionsIfRequested(Options viewOptions,
      Collection<ConfiguredTarget> configuredTargets, Collection<AspectValue> aspects,
      Set<Artifact> artifactsToBuild) {
    Iterable<Artifact> extraActionArtifacts =
        concat(addExtraActionsFromTargets(viewOptions, configuredTargets),
            addExtraActionsFromAspects(viewOptions, aspects));

    RegexFilter filter = viewOptions.extraActionFilter;
    for (Artifact artifact : extraActionArtifacts) {
      boolean filterMatches =
          filter == null || filter.isIncluded(artifact.getOwnerLabel().toString());
      if (filterMatches) {
        artifactsToBuild.add(artifact);
      }
    }
  }

  /** Gathers extra-action artifacts contributed by the configured targets themselves. */
  private NestedSet<Artifact> addExtraActionsFromTargets(
      BuildView.Options viewOptions, Collection<ConfiguredTarget> configuredTargets) {
    NestedSetBuilder<Artifact> builder = NestedSetBuilder.stableOrder();
    for (ConfiguredTarget target : configuredTargets) {
      ExtraActionArtifactsProvider provider =
          target.getProvider(ExtraActionArtifactsProvider.class);
      if (provider != null) {
        if (viewOptions.extraActionTopLevelOnly) {
          if (!viewOptions.extraActionTopLevelOnlyWithAspects) {
            builder.addTransitive(provider.getExtraActionArtifacts());
          } else {
            // Collect all aspect-classes that topLevel might inject.
            Set<AspectClass> aspectClasses = new HashSet<>();
            for (Attribute attr : target.getTarget().getAssociatedRule().getAttributes()) {
              aspectClasses.addAll(attr.getAspectClasses());
            }

            builder.addTransitive(provider.getExtraActionArtifacts());
            if (!aspectClasses.isEmpty()) {
              builder.addAll(filterTransitiveExtraActions(provider, aspectClasses));
            }
          }
        } else {
          builder.addTransitive(provider.getTransitiveExtraActionArtifacts());
        }
      }
    }
    return builder.build();
  }

  /**
   * Returns a list of actions from 'provider' that were registered by an aspect from
   * 'aspectClasses'. All actions in 'provider' are considered - both direct and transitive.
   */
  private ImmutableList<Artifact> filterTransitiveExtraActions(
      ExtraActionArtifactsProvider provider, Set<AspectClass> aspectClasses) {
    ImmutableList.Builder<Artifact> artifacts = ImmutableList.builder();
    // Add to 'artifacts' all extra-actions which were registered by aspects which 'topLevel'
    // might have injected.
    for (Artifact artifact : provider.getTransitiveExtraActionArtifacts()) {
      ArtifactOwner owner = artifact.getArtifactOwner();
      if (owner instanceof AspectKey) {
        if (aspectClasses.contains(((AspectKey) owner).getAspectClass())) {
          artifacts.add(artifact);
        }
      }
    }
    return artifacts.build();
  }

  /** Gathers extra-action artifacts contributed by the requested top-level aspects. */
  private NestedSet<Artifact> addExtraActionsFromAspects(
      BuildView.Options viewOptions, Collection<AspectValue> aspects) {
    NestedSetBuilder<Artifact> builder = NestedSetBuilder.stableOrder();
    for (AspectValue aspect : aspects) {
      ExtraActionArtifactsProvider provider =
          aspect.getConfiguredAspect().getProvider(ExtraActionArtifactsProvider.class);
      if (provider != null) {
        if (viewOptions.extraActionTopLevelOnly) {
          builder.addTransitive(provider.getExtraActionArtifacts());
        } else {
          builder.addTransitive(provider.getTransitiveExtraActionArtifacts());
        }
      }
    }
    return builder.build();
  }

  /** Schedules the tests unless the output groups indicate a compile-only build. */
  private static void scheduleTestsIfRequested(Collection<ConfiguredTarget> targetsToTest,
      Collection<ConfiguredTarget> targetsToTestExclusive,
      TopLevelArtifactContext topLevelOptions,
      Collection<ConfiguredTarget> allTestTargets) {
    Set<String> outputGroups = topLevelOptions.outputGroups();
    if (!outputGroups.contains(OutputGroupProvider.FILES_TO_COMPILE)
        && !outputGroups.contains(OutputGroupProvider.COMPILATION_PREREQUISITES)
        && allTestTargets != null) {
      scheduleTests(targetsToTest, targetsToTestExclusive, allTestTargets,
          topLevelOptions.runTestsExclusively());
    }
  }

  /**
   * Returns set of artifacts representing test results, writing into targetsToTest and
   * targetsToTestExclusive.
   */
  private static void scheduleTests(Collection<ConfiguredTarget> targetsToTest,
      Collection<ConfiguredTarget> targetsToTestExclusive,
      Collection<ConfiguredTarget> allTestTargets,
      boolean isExclusive) {
    for (ConfiguredTarget target : allTestTargets) {
      if (target.getTarget() instanceof Rule) {
        boolean exclusive =
            isExclusive || TargetUtils.isExclusiveTestRule((Rule) target.getTarget());
        Collection<ConfiguredTarget> testCollection = exclusive
            ? targetsToTestExclusive
            : targetsToTest;
        testCollection.add(target);
      }
    }
  }

  /**
   * Given a set of top-level targets and a configuration collection, returns the appropriate
   * <Target, Configuration> pair for each target.
   *
   * <p>Preserves the original input ordering.
   */
  private List<TargetAndConfiguration> nodesForTopLevelTargets(
      BuildConfigurationCollection configurations, Collection<Target> targets,
      ExtendedEventHandler eventHandler) throws InterruptedException {
    // We use a hash set here to remove duplicate nodes; this can happen for input files and package
    // groups.
    LinkedHashSet<TargetAndConfiguration> nodes = new LinkedHashSet<>(targets.size());
    for (BuildConfiguration config : configurations.getTargetConfigurations()) {
      for (Target target : targets) {
        nodes.add(new TargetAndConfiguration(target, config.useDynamicConfigurations()
            // Dynamic configurations apply top-level transitions through a different code path:
            // BuildConfiguration#topLevelConfigurationHook.
// That path has the advantages of a)
            // not requiring a global transitions table and b) making its choices outside core
            // Bazel code.
            ? (target.isConfigurable() ? config : null)
            : BuildConfigurationCollection.configureTopLevelTarget(config, target)));
      }
    }
    return ImmutableList.copyOf(
        configurations.useDynamicConfigurations()
            ? getDynamicConfigurations(nodes, eventHandler)
            : nodes);
  }

  /**
   * If {@link BuildConfiguration.Options#trimConfigurations()} is true, transforms a collection of
   * <Target, Configuration> pairs by trimming each target's configuration to only the fragments the
   * target and its transitive dependencies need.
   *
   * <p>Else returns configurations that unconditionally include all fragments.
   *
   * <p>Preserves the original input order (but merges duplicate nodes that might occur due to
   * top-level configuration transitions). Uses original (untrimmed) configurations for targets
   * that can't be evaluated (e.g. due to loading phase errors).
   *
   * <p>This is suitable for feeding {@link ConfiguredTargetValue} keys: as general principle {@link
   * ConfiguredTarget}s should have exactly as much information in their configurations as they need
   * to evaluate and no more (e.g. there's no need for Android settings in a C++ configured target).
   */
  // TODO(bazel-team): error out early for targets that fail - untrimmed configurations should
  // never make it through analysis (and especially not seed ConfiguredTargetValues)
  private LinkedHashSet<TargetAndConfiguration> getDynamicConfigurations(
      Iterable<TargetAndConfiguration> inputs, ExtendedEventHandler eventHandler)
      throws InterruptedException {
    Map<Label, Target> labelsToTargets = new LinkedHashMap<>();
    // We'll get the configs from SkyframeExecutor#getConfigurations, which gets configurations
    // for deps including transitions. So to satisfy its API we repackage each target as a
    // Dependency with a NONE transition.
    Multimap<BuildConfiguration, Dependency> asDeps =
        ArrayListMultimap.<BuildConfiguration, Dependency>create();

    for (TargetAndConfiguration targetAndConfig : inputs) {
      labelsToTargets.put(targetAndConfig.getLabel(), targetAndConfig.getTarget());
      if (targetAndConfig.getConfiguration() != null) {
        asDeps.put(targetAndConfig.getConfiguration(),
            Dependency.withTransitionAndAspects(
                targetAndConfig.getLabel(),
                getTopLevelTransition(targetAndConfig),
                // TODO(bazel-team): support top-level aspects
                AspectCollection.EMPTY));
      }
    }

    // Maps <target, originalConfig> pairs to <target, dynamicConfig> pairs for targets that
    // could be successfully Skyframe-evaluated.
    Map<TargetAndConfiguration, TargetAndConfiguration> successfullyEvaluatedTargets =
        new LinkedHashMap<>();
    if (!asDeps.isEmpty()) {
      for (BuildConfiguration fromConfig : asDeps.keySet()) {
        Multimap<Dependency, BuildConfiguration> trimmedTargets =
            skyframeExecutor.getConfigurations(eventHandler, fromConfig.getOptions(),
                asDeps.get(fromConfig));
        for (Map.Entry<Dependency, BuildConfiguration> trimmedTarget : trimmedTargets.entries()) {
          Target target = labelsToTargets.get(trimmedTarget.getKey().getLabel());
          successfullyEvaluatedTargets.put(
              new TargetAndConfiguration(target, fromConfig),
              new TargetAndConfiguration(target, trimmedTarget.getValue()));
        }
      }
    }

    LinkedHashSet<TargetAndConfiguration> result = new LinkedHashSet<>();
    for (TargetAndConfiguration originalInput : inputs) {
      if (successfullyEvaluatedTargets.containsKey(originalInput)) {
        // The configuration was successfully trimmed.
        result.add(successfullyEvaluatedTargets.get(originalInput));
      } else {
        // Either the configuration couldn't be determined (e.g. loading phase error) or it's null.
        result.add(originalInput);
      }
    }
    return result;
  }

  /**
   * Returns the transition to apply to the top-level configuration before applying it to this
   * target. This enables support for rule-triggered top-level configuration hooks.
   */
  private static Attribute.Transition getTopLevelTransition(
      TargetAndConfiguration targetAndConfig) {
    Target target = targetAndConfig.getTarget();
    BuildConfiguration fromConfig = targetAndConfig.getConfiguration();
    Preconditions.checkArgument(fromConfig.useDynamicConfigurations());

    // Top-level transitions (chosen by configuration fragments):
    Transition topLevelTransition = fromConfig.topLevelConfigurationHook(target);
    if (topLevelTransition == null) {
      topLevelTransition = ConfigurationTransition.NONE;
    }

    // Rule class transitions (chosen by rule class definitions):
    if (target.getAssociatedRule() == null) {
      return topLevelTransition;
    }
    Rule associatedRule = target.getAssociatedRule();
    RuleTransitionFactory transitionFactory =
        associatedRule.getRuleClassObject().getTransitionFactory();
    if (transitionFactory == null) {
      return topLevelTransition;
    }
    Attribute.Transition ruleClassTransition =
        transitionFactory.buildTransitionFor(associatedRule);
    if (ruleClassTransition == null) {
      return topLevelTransition;
    } else if (topLevelTransition == ConfigurationTransition.NONE) {
      return ruleClassTransition;
    } else {
      // Both a fragment hook and a rule-class transition apply: compose them.
      return new ComposingSplitTransition(topLevelTransition, ruleClassTransition);
    }
  }

  /**
   * Gets a dynamic configuration for the given target.
   *
   * <p>If {@link BuildConfiguration.Options#trimConfigurations()} is true, the configuration only
   * includes the fragments needed by the fragment and its transitive closure. Else unconditionally
   * includes all fragments.
   */
  @VisibleForTesting
  public BuildConfiguration getDynamicConfigurationForTesting(
      Target target, BuildConfiguration config, ExtendedEventHandler eventHandler)
      throws InterruptedException {
    return Iterables.getOnlyElement(getDynamicConfigurations(
        ImmutableList.<TargetAndConfiguration>of(new TargetAndConfiguration(target, config)),
        eventHandler)).getConfiguration();
  }

  /**
   * Sets the possible artifact roots in the artifact factory. This allows the factory to resolve
   * paths with unknown roots to artifacts.
*/ @VisibleForTesting // for BuildViewTestCase public void setArtifactRoots(ImmutableMap<PackageIdentifier, Path> packageRoots) { Map<Path, Root> rootMap = new HashMap<>(); Map<PackageIdentifier, Root> realPackageRoots = new HashMap<>(); for (Map.Entry<PackageIdentifier, Path> entry : packageRoots.entrySet()) { Root root = rootMap.get(entry.getValue()); if (root == null) { root = Root.asSourceRoot(entry.getValue(), entry.getKey().getRepository().isMain()); rootMap.put(entry.getValue(), root); } realPackageRoots.put(entry.getKey(), root); } // Source Artifact roots: getArtifactFactory().setPackageRoots(realPackageRoots); } /** * Tests and clears the current thread's pending "interrupted" status, and * throws InterruptedException iff it was set. */ protected final void pollInterruptedStatus() throws InterruptedException { if (Thread.interrupted()) { throw new InterruptedException(); } } // For testing @VisibleForTesting public Iterable<ConfiguredTarget> getDirectPrerequisitesForTesting( ExtendedEventHandler eventHandler, ConfiguredTarget ct, BuildConfigurationCollection configurations) throws EvalException, InvalidConfigurationException, InterruptedException, InconsistentAspectOrderException { return skyframeExecutor.getConfiguredTargets( eventHandler, ct.getConfiguration(), ImmutableSet.copyOf( getDirectPrerequisiteDependenciesForTesting(eventHandler, ct, configurations).values()), false); } @VisibleForTesting public OrderedSetMultimap<Attribute, Dependency> getDirectPrerequisiteDependenciesForTesting( final ExtendedEventHandler eventHandler, final ConfiguredTarget ct, BuildConfigurationCollection configurations) throws EvalException, InvalidConfigurationException, InterruptedException, InconsistentAspectOrderException { if (!(ct.getTarget() instanceof Rule)) { return OrderedSetMultimap.create(); } class SilentDependencyResolver extends DependencyResolver { @Override protected void invalidVisibilityReferenceHook(TargetAndConfiguration node, Label label) { throw new 
RuntimeException("bad visibility on " + label + " during testing unexpected"); } @Override protected void invalidPackageGroupReferenceHook(TargetAndConfiguration node, Label label) { throw new RuntimeException("bad package group on " + label + " during testing unexpected"); } @Override protected void missingEdgeHook(Target from, Label to, NoSuchThingException e) { throw new RuntimeException( "missing dependency from " + from.getLabel() + " to " + to + ": " + e.getMessage(), e); } @Override protected Target getTarget(Target from, Label label, NestedSetBuilder<Label> rootCauses) throws InterruptedException { try { return skyframeExecutor.getPackageManager().getTarget(eventHandler, label); } catch (NoSuchThingException e) { throw new IllegalStateException(e); } } @Override protected List<BuildConfiguration> getConfigurations( Set<Class<? extends BuildConfiguration.Fragment>> fragments, Iterable<BuildOptions> buildOptions) { Preconditions.checkArgument(ct.getConfiguration().fragmentClasses().equals(fragments)); Dependency asDep = Dependency.withTransitionAndAspects(ct.getLabel(), Attribute.ConfigurationTransition.NONE, AspectCollection.EMPTY); ImmutableList.Builder<BuildConfiguration> builder = ImmutableList.builder(); for (BuildOptions options : buildOptions) { builder.add(Iterables.getOnlyElement( skyframeExecutor .getConfigurations(eventHandler, options, ImmutableList.<Dependency>of(asDep)) .values() )); } return builder.build(); } } DependencyResolver dependencyResolver = new SilentDependencyResolver(); TargetAndConfiguration ctgNode = new TargetAndConfiguration(ct.getTarget(), ct.getConfiguration()); return dependencyResolver.dependentNodeMap( ctgNode, configurations.getHostConfiguration(), /*aspect=*/ null, getConfigurableAttributeKeysForTesting(eventHandler, ctgNode)); } /** * Returns ConfigMatchingProvider instances corresponding to the configurable attribute keys * present in this rule's attributes. 
*/ private ImmutableMap<Label, ConfigMatchingProvider> getConfigurableAttributeKeysForTesting( ExtendedEventHandler eventHandler, TargetAndConfiguration ctg) { if (!(ctg.getTarget() instanceof Rule)) { return ImmutableMap.of(); } Rule rule = (Rule) ctg.getTarget(); Map<Label, ConfigMatchingProvider> keys = new LinkedHashMap<>(); RawAttributeMapper mapper = RawAttributeMapper.of(rule); for (Attribute attribute : rule.getAttributes()) { for (Label label : mapper.getConfigurabilityKeys(attribute.getName(), attribute.getType())) { if (BuildType.Selector.isReservedLabel(label)) { continue; } ConfiguredTarget ct = getConfiguredTargetForTesting( eventHandler, label, ctg.getConfiguration()); keys.put(label, Preconditions.checkNotNull(ct.getProvider(ConfigMatchingProvider.class))); } } return ImmutableMap.copyOf(keys); } private OrderedSetMultimap<Attribute, ConfiguredTarget> getPrerequisiteMapForTesting( final ExtendedEventHandler eventHandler, ConfiguredTarget target, BuildConfigurationCollection configurations) throws EvalException, InvalidConfigurationException, InterruptedException, InconsistentAspectOrderException { OrderedSetMultimap<Attribute, Dependency> depNodeNames = getDirectPrerequisiteDependenciesForTesting(eventHandler, target, configurations); ImmutableMultimap<Dependency, ConfiguredTarget> cts = skyframeExecutor.getConfiguredTargetMap( eventHandler, target.getConfiguration(), ImmutableSet.copyOf(depNodeNames.values()), false); OrderedSetMultimap<Attribute, ConfiguredTarget> result = OrderedSetMultimap.create(); for (Map.Entry<Attribute, Dependency> entry : depNodeNames.entries()) { result.putAll(entry.getKey(), cts.get(entry.getValue())); } return result; } private Transition getTopLevelTransitionForTarget(Label label, ExtendedEventHandler handler) { Rule rule; try { rule = skyframeExecutor .getPackageManager() .getTarget(handler, label) .getAssociatedRule(); } catch (NoSuchPackageException | NoSuchTargetException e) { return ConfigurationTransition.NONE; } 
catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new AssertionError("Configuration of " + label + " interrupted"); } if (rule == null) { return ConfigurationTransition.NONE; } RuleTransitionFactory factory = rule .getRuleClassObject() .getTransitionFactory(); if (factory == null) { return ConfigurationTransition.NONE; } Transition transition = factory.buildTransitionFor(rule); return (transition == null) ? ConfigurationTransition.NONE : transition; } /** * Returns a configured target for the specified target and configuration. If dynamic * configurations are activated, and the target in question has a top-level rule class transition, * that transition is applied in the returned ConfiguredTarget. Returns {@code null} if something * goes wrong. */ @VisibleForTesting public ConfiguredTarget getConfiguredTargetForTesting( ExtendedEventHandler eventHandler, Label label, BuildConfiguration config) { return skyframeExecutor.getConfiguredTargetForTesting(eventHandler, label, config, getTopLevelTransitionForTarget(label, eventHandler)); } /** * Returns a RuleContext which is the same as the original RuleContext of the target parameter. */ @VisibleForTesting public RuleContext getRuleContextForTesting( ConfiguredTarget target, StoredEventHandler eventHandler, BuildConfigurationCollection configurations) throws EvalException, InvalidConfigurationException, InterruptedException, InconsistentAspectOrderException { BuildConfiguration targetConfig = target.getConfiguration(); CachingAnalysisEnvironment env = new CachingAnalysisEnvironment(getArtifactFactory(), new ConfiguredTargetKey(target.getLabel(), targetConfig), /*isSystemEnv=*/false, targetConfig.extendedSanityChecks(), eventHandler, /*skyframeEnv=*/null, targetConfig.isActionsEnabled()); return getRuleContextForTesting(eventHandler, target, env, configurations); } /** * Creates and returns a rule context that is equivalent to the one that was used to create the * given configured target. 
*/ @VisibleForTesting public RuleContext getRuleContextForTesting(ExtendedEventHandler eventHandler, ConfiguredTarget target, AnalysisEnvironment env, BuildConfigurationCollection configurations) throws EvalException, InvalidConfigurationException, InterruptedException, InconsistentAspectOrderException { BuildConfiguration targetConfig = target.getConfiguration(); return new RuleContext.Builder( env, (Rule) target.getTarget(), ImmutableList.<AspectDescriptor>of(), targetConfig, configurations.getHostConfiguration(), ruleClassProvider.getPrerequisiteValidator(), ((Rule) target.getTarget()).getRuleClassObject().getConfigurationFragmentPolicy()) .setVisibility( NestedSetBuilder.<PackageSpecification>create( Order.STABLE_ORDER, PackageSpecification.everything())) .setPrerequisites(getPrerequisiteMapForTesting(eventHandler, target, configurations)) .setConfigConditions(ImmutableMap.<Label, ConfigMatchingProvider>of()) .setUniversalFragment(ruleClassProvider.getUniversalFragment()) .build(); } /** * For a configured target dependentTarget, returns the desired configured target that is depended * upon. Useful for obtaining the a target with aspects required by the dependent. */ @VisibleForTesting public ConfiguredTarget getPrerequisiteConfiguredTargetForTesting( ExtendedEventHandler eventHandler, ConfiguredTarget dependentTarget, Label desiredTarget, BuildConfigurationCollection configurations) throws EvalException, InvalidConfigurationException, InterruptedException, InconsistentAspectOrderException { Collection<ConfiguredTarget> configuredTargets = getPrerequisiteMapForTesting(eventHandler, dependentTarget, configurations).values(); for (ConfiguredTarget ct : configuredTargets) { if (ct.getLabel().equals(desiredTarget)) { return ct; } } return null; } /** * A converter for loading phase thread count. Since the default is not a true constant, we create * a converter here to implement the default logic. 
*/ public static final class LoadingPhaseThreadCountConverter implements Converter<Integer> { @Override public Integer convert(String input) throws OptionsParsingException { if ("-1".equals(input)) { // Reduce thread count while running tests. Test cases are typically small, and large thread // pools vying for a relatively small number of CPU cores may induce non-optimal // performance. return System.getenv("TEST_TMPDIR") == null ? 200 : 5; } try { int result = Integer.decode(input); if (result < 0) { throw new OptionsParsingException("'" + input + "' must be at least -1"); } return result; } catch (NumberFormatException e) { throw new OptionsParsingException("'" + input + "' is not an int"); } } @Override public String getTypeDescription() { return "an integer"; } } }
/* * Copyright (c) 2000, 2003, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.sun.corba.se.impl.naming.cosnaming; import org.omg.CosNaming.NamingContextExtPackage.*; import java.io.StringWriter; // Import general CORBA classes import org.omg.CORBA.SystemException; import org.omg.CORBA.Object; // Import org.omg.CosNaming types import org.omg.CosNaming.NameComponent; import org.omg.CosNaming.NamingContext; /** * Class InteroperableNamingImpl implements the methods defined * for NamingContextExt which is part of Interoperable Naming * Service specifications. This class is added for doing more * of Parsing and Building of Stringified names according to INS * Spec. */ public class InterOperableNamingImpl { /** * Method which stringifies the Name Components given as the input * parameter. 
* * @param n Array of Name Components (Simple or Compound Names) * @return string which is the stringified reference. */ public String convertToString( org.omg.CosNaming.NameComponent[] theNameComponents ) { String theConvertedString = convertNameComponentToString( theNameComponents[0] ); String temp; for( int i = 1; i < theNameComponents.length; i++ ) { temp = convertNameComponentToString( theNameComponents[i] ); if( temp != null ) { theConvertedString = theConvertedString + "/" + convertNameComponentToString( theNameComponents[i] ); } } return theConvertedString; } /** This method converts a single Namecomponent to String, By adding Escapes * If neccessary. */ private String convertNameComponentToString( org.omg.CosNaming.NameComponent theNameComponent ) { if( ( ( theNameComponent.id == null ) ||( theNameComponent.id.length() == 0 ) ) &&( ( theNameComponent.kind == null ) ||( theNameComponent.kind.length() == 0 ) ) ) { return "."; } else if( ( theNameComponent.id == null ) ||( theNameComponent.id.length() == 0 ) ) { String kind = addEscape( theNameComponent.kind ); return "." + kind; } else if( ( theNameComponent.kind == null ) ||( theNameComponent.kind.length() == 0 ) ) { String id = addEscape( theNameComponent.id ); return id; } else { String id = addEscape( theNameComponent.id ); String kind = addEscape( theNameComponent.kind ); return (id + "." + kind); } } /** This method adds escape '\' for the Namecomponent if neccessary */ private String addEscape( String value ) { StringBuffer theNewValue; if( (value != null) && ( (value.indexOf('.') != -1 ) || (value.indexOf('/') != -1))) { char c; theNewValue = new StringBuffer( ); for( int i = 0; i < value.length( ); i++ ) { c = value.charAt( i ); if( ( c != '.' ) && (c != '/' ) ) { theNewValue.append( c ); } else { // Adding escape for the "." 
theNewValue.append( '\\' ); theNewValue.append( c ); } } } else { return value; } return new String( theNewValue ); } /** * Method which converts the Stringified name into Array of Name Components. * * @param string which is the stringified name. * @return Array of Name Components (Simple or Compound Names) */ public org.omg.CosNaming.NameComponent[] convertToNameComponent( String theStringifiedName ) throws org.omg.CosNaming.NamingContextPackage.InvalidName { String[] theStringifiedNameComponents = breakStringToNameComponents( theStringifiedName ); if( ( theStringifiedNameComponents == null ) || (theStringifiedNameComponents.length == 0 ) ) { return null; } NameComponent[] theNameComponents = new NameComponent[theStringifiedNameComponents.length]; for( int i = 0; i < theStringifiedNameComponents.length; i++ ) { theNameComponents[i] = createNameComponentFromString( theStringifiedNameComponents[i] ); } return theNameComponents; } /** Step1 in converting Stringified name into array of Name Component * is breaking the String into multiple name components */ private String[] breakStringToNameComponents( String theStringifiedName ) { int[] theIndices = new int[100]; int theIndicesIndex = 0; for(int index = 0; index <= theStringifiedName.length(); ) { theIndices[theIndicesIndex] = theStringifiedName.indexOf( '/', index ); if( theIndices[theIndicesIndex] == -1 ) { // This is the end of all the occurence of '/' and hence come // out of the loop index = theStringifiedName.length()+1; } else { // If the '/' is found, first check whether it is // preceded by escape '\' // If not then set theIndices and increment theIndicesIndex // and also set the index else just ignore the '/' if( (theIndices[theIndicesIndex] > 0 ) && (theStringifiedName.charAt( theIndices[theIndicesIndex]-1) == '\\') ) { index = theIndices[theIndicesIndex] + 1; theIndices[theIndicesIndex] = -1; } else { index = theIndices[theIndicesIndex] + 1; theIndicesIndex++; } } } if( theIndicesIndex == 0 ) { String[] 
tempString = new String[1]; tempString[0] = theStringifiedName; return tempString; } if( theIndicesIndex != 0 ) { theIndicesIndex++; } return StringComponentsFromIndices( theIndices, theIndicesIndex, theStringifiedName ); } /** This method breaks one big String into multiple substrings based * on the array of index passed in. */ private String[] StringComponentsFromIndices( int[] theIndices, int indicesCount, String theStringifiedName ) { String[] theStringComponents = new String[indicesCount]; int firstIndex = 0; int lastIndex = theIndices[0]; for( int i = 0; i < indicesCount; i++ ) { theStringComponents[i] = theStringifiedName.substring( firstIndex, lastIndex ); if( ( theIndices[i] < theStringifiedName.length() - 1 ) &&( theIndices[i] != -1 ) ) { firstIndex = theIndices[i]+1; } else { firstIndex = 0; i = indicesCount; } if( (i+1 < theIndices.length) && (theIndices[i+1] < (theStringifiedName.length() - 1)) && (theIndices[i+1] != -1) ) { lastIndex = theIndices[i+1]; } else { i = indicesCount; } // This is done for the last component if( firstIndex != 0 && i == indicesCount ) { theStringComponents[indicesCount-1] = theStringifiedName.substring( firstIndex ); } } return theStringComponents; } /** Step 2: After Breaking the Stringified name into set of NameComponent * Strings, The next step is to create Namecomponents from the substring * by removing the escapes if there are any. */ private NameComponent createNameComponentFromString( String theStringifiedNameComponent ) throws org.omg.CosNaming.NamingContextPackage.InvalidName { String id = null; String kind = null; if( ( theStringifiedNameComponent == null ) || ( theStringifiedNameComponent.length( ) == 0) || ( theStringifiedNameComponent.endsWith(".") ) ) { // If any of the above is true, then we create an invalid Name // Component to indicate that it is an invalid name. 
throw new org.omg.CosNaming.NamingContextPackage.InvalidName( ); } int index = theStringifiedNameComponent.indexOf( '.', 0 ); // The format could be XYZ (Without kind) if( index == -1 ) { id = theStringifiedNameComponent; } // The format is .XYZ (Without ID) else if( index == 0 ) { // This check is for the Namecomponent which is just "." meaning Id // and Kinds are null if( theStringifiedNameComponent.length( ) != 1 ) { kind = theStringifiedNameComponent.substring(1); } } else { if( theStringifiedNameComponent.charAt(index-1) != '\\' ) { id = theStringifiedNameComponent.substring( 0, index); kind = theStringifiedNameComponent.substring( index + 1 ); } else { boolean kindfound = false; while( (index < theStringifiedNameComponent.length() ) &&( kindfound != true ) ) { index = theStringifiedNameComponent.indexOf( '.',index + 1); if( index > 0 ) { if( theStringifiedNameComponent.charAt( index - 1 ) != '\\' ) { kindfound = true; } } else { // No more '.', which means there is no Kind index = theStringifiedNameComponent.length(); } } if( kindfound == true ) { id = theStringifiedNameComponent.substring( 0, index); kind = theStringifiedNameComponent.substring(index + 1 ); } else { id = theStringifiedNameComponent; } } } id = cleanEscapeCharacter( id ); kind = cleanEscapeCharacter( kind ); if( id == null ) { id = ""; } if( kind == null ) { kind = ""; } return new NameComponent( id, kind ); } /** This method cleans the escapes in the Stringified name and returns the * correct String */ private String cleanEscapeCharacter( String theString ) { if( ( theString == null ) || (theString.length() == 0 ) ) { return theString; } int index = theString.indexOf( '\\' ); if( index == 0 ) { return theString; } else { StringBuffer src = new StringBuffer( theString ); StringBuffer dest = new StringBuffer( ); char c; for( int i = 0; i < theString.length( ); i++ ) { c = src.charAt( i ); if( c != '\\' ) { dest.append( c ); } else { if( i+1 < theString.length() ) { char d = src.charAt( i + 1 
); // If there is a AlphaNumeric character after a \ // then include slash, as it is not intended as an // escape character. if( Character.isLetterOrDigit(d) ) { dest.append( c ); } } } } return new String(dest); } } /** * Method which converts the Stringified name and Host Name Address into * a URL based Name * * @param address which is ip based host name * @param name which is the stringified name. * @return url based Name. */ public String createURLBasedAddress( String address, String name ) throws InvalidAddress { String theurl = null; if( ( address == null ) ||( address.length() == 0 ) ) { throw new InvalidAddress(); } else { theurl = "corbaname:" + address + "#" + encode( name ); } return theurl; } /** Encodes the string according to RFC 2396 IETF spec required by INS. */ private String encode( String stringToEncode ) { StringWriter theStringAfterEscape = new StringWriter(); int byteCount = 0; for( int i = 0; i < stringToEncode.length(); i++ ) { char c = stringToEncode.charAt( i ) ; if( Character.isLetterOrDigit( c ) ) { theStringAfterEscape.write( c ); } // Do no Escape for characters in this list // RFC 2396 else if((c == ';') || (c == '/') || (c == '?') || (c == ':') || (c == '@') || (c == '&') || (c == '=') || (c == '+') || (c == '$') || (c == ';') || (c == '-') || (c == '_') || (c == '.') || (c == '!') || (c == '~') || (c == '*') || (c == ' ') || (c == '(') || (c == ')') ) { theStringAfterEscape.write( c ); } else { // Add escape theStringAfterEscape.write( '%' ); String hexString = Integer.toHexString( (int) c ); theStringAfterEscape.write( hexString ); } } return theStringAfterEscape.toString(); } }
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package ai.vespa.rankingexpression.importer.operations; import ai.vespa.rankingexpression.importer.OrderedTensorType; import com.yahoo.searchlib.rankingexpression.Reference; import com.yahoo.searchlib.rankingexpression.evaluation.DoubleValue; import ai.vespa.rankingexpression.importer.DimensionRenamer; import com.yahoo.searchlib.rankingexpression.evaluation.Value; import com.yahoo.searchlib.rankingexpression.rule.ArithmeticNode; import com.yahoo.searchlib.rankingexpression.rule.ArithmeticOperator; import com.yahoo.searchlib.rankingexpression.rule.ConstantNode; import com.yahoo.searchlib.rankingexpression.rule.EmbracedNode; import com.yahoo.searchlib.rankingexpression.rule.ExpressionNode; import com.yahoo.searchlib.rankingexpression.rule.Function; import com.yahoo.searchlib.rankingexpression.rule.FunctionNode; import com.yahoo.searchlib.rankingexpression.rule.ReferenceNode; import com.yahoo.searchlib.rankingexpression.rule.TensorFunctionNode; import com.yahoo.tensor.Tensor; import com.yahoo.tensor.TensorType; import com.yahoo.tensor.functions.Generate; import com.yahoo.tensor.functions.TensorFunction; import java.util.ArrayList; import java.util.List; import java.util.Optional; import static com.yahoo.searchlib.rankingexpression.rule.TensorFunctionNode.wrapScalar; public class Reshape extends IntermediateOperation { private final AttributeMap attributeMap; public Reshape(String modelName, String nodeName, List<IntermediateOperation> inputs, AttributeMap attributeMap) { super(modelName, nodeName, inputs); this.attributeMap = attributeMap; } @Override protected OrderedTensorType lazyGetType() { // required as we use tensor create inputs.get(0).exportAsRankingFunction = true; if (inputs.size() == 2) { return typeWithShapeAsInput(); } else if (inputs.size() == 1) { return typeWithShapeAsAttribute(); } throw new IllegalArgumentException("Expected 2 or 3 inputs for '" 
+ name + "', got " + inputs.size()); } private OrderedTensorType typeWithShapeAsInput() { IntermediateOperation newShape = inputs.get(1); if (newShape.getConstantValue().isEmpty()) throw new IllegalArgumentException("Reshape " + name + ": Shape input must be a constant."); OrderedTensorType inputType = inputs.get(0).type().get(); Tensor shape = newShape.getConstantValue().get().asTensor(); List<Integer> dimSizes = new ArrayList<>(shape.type().rank()); shape.valueIterator().forEachRemaining(v -> dimSizes.add(v.intValue())); // first pass - set 0 values, meaning that size is retained from input for (int i = 0; i < dimSizes.size(); ++i) { if (dimSizes.get(i) == 0) { if (i >= inputType.dimensions().size()) { throw new IllegalArgumentException("Reshape " + name + ": 0 value for dimension not found in input"); } dimSizes.set(i, inputType.dimensions().get(i).size().get().intValue()); } } // second pass - set any -1 value, meaning that the dimension size should be expanded to fill the tensor for (int i = 0; i < dimSizes.size(); ++i) { if (dimSizes.get(i) < 0) { int shapeSize = dimSizes.stream().reduce(1, (a, b) -> a * b); int tensorSize = OrderedTensorType.tensorSize(inputType.type()).intValue(); dimSizes.set(i, -1 * tensorSize / (shapeSize == 0 ? 
-1 : shapeSize)); } } return buildOutputType(dimSizes); } private OrderedTensorType typeWithShapeAsAttribute() { if (attributeMap.getList("shape").isEmpty() || attributeMap.getList("shape").get().size() == 0) throw new IllegalArgumentException("Reshape in " + name + ": Shape attribute is empty."); OrderedTensorType inputType = inputs.get(0).type().get(); List<Value> shape = attributeMap.getList("shape").get(); List<Integer> dimSizes = new ArrayList<>(shape.size()); for (Value v : shape) { int size = (int) v.asDouble(); if (size < 0) { int shapeSize = (int) shape.stream().mapToDouble(Value::asDouble).reduce(1, (a, b) -> a * b); int tensorSize = OrderedTensorType.tensorSize(inputType.type()).intValue(); size = -1 * shapeSize / tensorSize; } dimSizes.add(size); } return buildOutputType(dimSizes); } private OrderedTensorType buildOutputType(List<Integer> dimSizes) { OrderedTensorType.Builder outputTypeBuilder = new OrderedTensorType.Builder(resultValueType()); for (int i = 0; i < dimSizes.size(); ++i) { outputTypeBuilder.add(TensorType.Dimension.indexed(String.format("%s_%d", vespaName(), i), dimSizes.get(i))); } return outputTypeBuilder.build(); } @Override protected TensorFunction<Reference> lazyGetFunction() { if ( ! inputs.stream().map(IntermediateOperation::type).allMatch(Optional::isPresent) ) return null; if ( ! 
inputs.stream().map(IntermediateOperation::function).allMatch(Optional::isPresent) ) return null; OrderedTensorType inputType = inputs.get(0).type().get(); TensorFunction<Reference> inputFunction = inputs.get(0).function().get(); return reshape(inputFunction, inputType, type); } @Override public void addDimensionNameConstraints(DimensionRenamer renamer) { addConstraintsFrom(type, renamer); } @Override public Reshape withInputs(List<IntermediateOperation> inputs) { return new Reshape(modelName(), name(), inputs, attributeMap); } public TensorFunction<Reference> reshape(TensorFunction<Reference> inputFunction, OrderedTensorType inputType, OrderedTensorType outputType) { if ( ! OrderedTensorType.tensorSize(inputType.type()).equals(OrderedTensorType.tensorSize(outputType.type()))) throw new IllegalArgumentException("New and old shape of tensor must have the same size when reshaping"); IntermediateOperation input = inputs.get(0); String inputFunctionName = input.rankingExpressionFunctionName(); List<com.yahoo.tensor.functions.Slice.DimensionValue<Reference>> dimensionValues = new ArrayList<>(); // Conceptually, reshaping consists on unrolling a tensor to an array using the dimension order, // then use the dimension order of the new shape to roll back into a tensor. 
ExpressionNode unrolled = new EmbracedNode(unrollTensorExpression(outputType)); long innerSize = 1; for (int dim = 0; dim < inputType.rank(); ++dim) { innerSize *= inputType.dimensions().get(dim).size().get(); } for (int dim = 0; dim < inputType.rank(); ++dim) { String inputDimensionName = inputType.dimensions().get(dim).name(); long inputDimensionSize = inputType.dimensions().get(dim).size().get(); long previousInnerSize = innerSize; innerSize /= inputDimensionSize; ExpressionNode inputDimensionExpression; if (inputDimensionSize == 1) { inputDimensionExpression = new EmbracedNode(new ConstantNode(DoubleValue.zero)); } else if (dim == (inputType.rank() - 1)) { ExpressionNode size = new ConstantNode(new DoubleValue(inputDimensionSize)); ExpressionNode div = new ArithmeticNode(unrolled, ArithmeticOperator.MODULO, size); inputDimensionExpression = new EmbracedNode(div); } else { ExpressionNode size = new ConstantNode(new DoubleValue(innerSize)); ExpressionNode previousSize = new ConstantNode(new DoubleValue(previousInnerSize)); ExpressionNode mod = new ArithmeticNode(unrolled, ArithmeticOperator.MODULO, previousSize); ExpressionNode div = new ArithmeticNode(new EmbracedNode(mod), ArithmeticOperator.DIVIDE, size); inputDimensionExpression = new EmbracedNode(div); } dimensionValues.add(new com.yahoo.tensor.functions.Slice.DimensionValue<>(Optional.of(inputDimensionName), wrapScalar(inputDimensionExpression))); } TensorFunction<Reference> inputIndices = new TensorFunctionNode.ExpressionTensorFunction(new ReferenceNode(inputFunctionName)); com.yahoo.tensor.functions.Slice<Reference> sliceIndices = new com.yahoo.tensor.functions.Slice<>(inputIndices, dimensionValues); ExpressionNode sliceExpression = new TensorFunctionNode(sliceIndices); return Generate.bound(outputType.type(), wrapScalar(sliceExpression)); } private static ExpressionNode unrollTensorExpression(OrderedTensorType type) { if (type.rank() == 0) return new ConstantNode(DoubleValue.zero); List<ExpressionNode> 
children = new ArrayList<>(); List<ArithmeticOperator> operators = new ArrayList<>(); int size = 1; for (int i = type.dimensions().size() - 1; i >= 0; --i) { TensorType.Dimension dimension = type.dimensions().get(i); children.add(0, new ReferenceNode(dimension.name())); if (size > 1) { operators.add(0, ArithmeticOperator.MULTIPLY); children.add(0, new ConstantNode(new DoubleValue(size))); } size *= OrderedTensorType.dimensionSize(dimension); if (i > 0) { operators.add(0, ArithmeticOperator.PLUS); } } return new ArithmeticNode(children, operators); } @Override public String operationName() { return "Reshape"; } }
/*
 * Copyright 2015-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.d;

import com.facebook.buck.cxx.CxxBuckConfig;
import com.facebook.buck.cxx.CxxLink;
import com.facebook.buck.cxx.CxxLinkableEnhancer;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.cxx.NativeLinkable;
import com.facebook.buck.cxx.NativeLinkableInput;
import com.facebook.buck.graph.AbstractBreadthFirstTraversal;
import com.facebook.buck.io.MorePaths;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.ImmutableFlavor;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.SymlinkTree;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.args.StringArg;
import com.facebook.buck.rules.coercer.SourceList;
import com.facebook.buck.util.MoreMaps;
import com.google.common.base.Preconditions;
import com.google.common.base.Suppliers;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;

import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;

/**
 * Utility functions for use in D Descriptions.
 *
 * <p>Stateless helper methods shared by the D language build-rule descriptions:
 * target/flavor construction, per-source compile rules, the final native link,
 * and the source symlink tree exposed to dependents.
 */
abstract class DDescriptionUtils {

  // Flavor marking the SymlinkTree rule that mirrors a library's sources
  // (see getSymlinkTreeTarget / createSourceSymlinkTree).
  public static final Flavor SOURCE_LINK_TREE = ImmutableFlavor.of("source-link-tree");

  /**
   * Creates a BuildTarget, based on an existing build target, but flavored with a CxxPlatform
   * and an additional flavor created by combining a prefix and an output file name.
   *
   * @param existingTarget the existing target
   * @param flavorPrefix prefix to be used for added flavor
   * @param fileName filename to be used for added flavor
   * @param cxxPlatform the C++ platform to compile for
   * @return the new BuildTarget
   */
  public static BuildTarget createBuildTargetForFile(
      BuildTarget existingTarget,
      String flavorPrefix,
      String fileName,
      CxxPlatform cxxPlatform) {
    return BuildTarget.builder(existingTarget)
        .addFlavors(
            cxxPlatform.getFlavor(),
            // File names may contain characters that are illegal in flavors;
            // replaceInvalidCharacters sanitizes them.
            ImmutableFlavor.of(
                flavorPrefix + Flavor.replaceInvalidCharacters(fileName)))
        .build();
  }

  /**
   * Creates a new BuildTarget, based on an existing target, for a file to be compiled.
   *
   * @param existingTarget the existing target
   * @param src the source file to be compiled
   * @param cxxPlatform the C++ platform to compile the file for
   * @return a BuildTarget to compile a D source file to an object file
   */
  public static BuildTarget createDCompileBuildTarget(
      BuildTarget existingTarget,
      String src,
      CxxPlatform cxxPlatform) {
    return createBuildTargetForFile(
        existingTarget,
        "compile-",
        // The flavor encodes the object file name, so two sources mapping to
        // the same object name would collide — assumed unique per target.
        DCompileStep.getObjectNameForSourceName(src),
        cxxPlatform);
  }

  /**
   * Creates a {@link com.facebook.buck.cxx.NativeLinkable} using sources compiled by
   * the D compiler.
   *
   * @param params build parameters for the build target
   * @param buildRuleResolver resolver for build rules
   * @param cxxPlatform the C++ platform to compile for
   * @param dBuckConfig the Buck configuration for D
   * @param cxxBuckConfig the Buck configuration for C++ linking
   * @param compilerFlags flags to pass to the compiler
   * @param sources source files to compile
   * @param linkerFlags additional flags to pass to the linker
   * @param includes D include directories exported to the compile rules
   * @return the new build rule
   */
  public static CxxLink createNativeLinkable(
      BuildRuleParams params,
      BuildRuleResolver buildRuleResolver,
      CxxPlatform cxxPlatform,
      DBuckConfig dBuckConfig,
      CxxBuckConfig cxxBuckConfig,
      ImmutableList<String> compilerFlags,
      SourceList sources,
      ImmutableList<String> linkerFlags,
      DIncludes includes)
      throws NoSuchBuildTargetException {

    BuildTarget buildTarget = params.getBuildTarget();
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(buildRuleResolver);
    SourcePathResolver sourcePathResolver = new SourcePathResolver(ruleFinder);

    // One compile rule per source; collect the resulting object-file paths.
    ImmutableList<SourcePath> sourcePaths =
        sourcePathsForCompiledSources(
            params,
            buildRuleResolver,
            sourcePathResolver,
            ruleFinder,
            cxxPlatform,
            dBuckConfig,
            compilerFlags,
            sources,
            includes);

    // Return a rule to link the .o for the binary together with its
    // dependencies.
    return CxxLinkableEnhancer.createCxxLinkableBuildRule(
        cxxBuckConfig,
        cxxPlatform,
        params,
        buildRuleResolver,
        sourcePathResolver,
        ruleFinder,
        buildTarget,
        Linker.LinkType.EXECUTABLE,
        Optional.empty(),
        BuildTargets.getGenPath(
            params.getProjectFilesystem(),
            buildTarget,
            "%s/" + buildTarget.getShortName()),
        Linker.LinkableDepType.STATIC,
        // Only NativeLinkable deps participate in the link.
        FluentIterable.from(params.getDeps())
            .filter(NativeLinkable.class),
        /* cxxRuntimeType */ Optional.empty(),
        /* bundleLoader */ Optional.empty(),
        ImmutableSet.of(),
        // Linker args: config-level flags, then per-rule flags, then objects.
        NativeLinkableInput.builder()
            .addAllArgs(StringArg.from(dBuckConfig.getLinkerFlags()))
            .addAllArgs(StringArg.from(linkerFlags))
            .addAllArgs(SourcePathArg.from(sourcePathResolver, sourcePaths))
            .build());
  }

  /** Returns {@code baseTarget} flavored with {@link #SOURCE_LINK_TREE}. */
  public static BuildTarget getSymlinkTreeTarget(BuildTarget baseTarget) {
    return BuildTarget.builder(baseTarget)
        .addFlavors(SOURCE_LINK_TREE)
        .build();
  }

  /**
   * Creates the SymlinkTree rule that mirrors {@code sources} (keyed by their
   * logical names) under this target's gen path, so the D compiler can resolve
   * imports by module path.
   */
  public static SymlinkTree createSourceSymlinkTree(
      BuildTarget target,
      BuildRuleParams baseParams,
      SourcePathResolver pathResolver,
      SourceList sources) {
    // Callers must pass a target produced by getSymlinkTreeTarget.
    Preconditions.checkState(target.getFlavors().contains(SOURCE_LINK_TREE));
    return new SymlinkTree(
        baseParams.copyWithChanges(
            target,
            Suppliers.ofInstance(ImmutableSortedSet.of()),
            Suppliers.ofInstance(ImmutableSortedSet.of())),
        pathResolver,
        baseParams.getProjectFilesystem().resolve(
            BuildTargets.getGenPath(
                baseParams.getProjectFilesystem(),
                baseParams.getBuildTarget(),
                "%s")),
        // Convert the String keys of the name map into Paths relative to the
        // project root's file system.
        MoreMaps.transformKeys(
            sources.toNameMap(
                baseParams.getBuildTarget(),
                pathResolver,
                "srcs"),
            MorePaths.toPathFn(baseParams.getProjectFilesystem().getRootPath().getFileSystem())));
  }

  /**
   * Breadth-first walk over {@code inputs} and their transitive deps,
   * collecting every {@link DLibrary} encountered. Traversal does not descend
   * through non-DLibrary rules (they return no further deps).
   */
  private static ImmutableMap<BuildTarget, DLibrary> getTransitiveDLibraryRules(
      Iterable<? extends BuildRule> inputs) {
    final ImmutableMap.Builder<BuildTarget, DLibrary> libraries = ImmutableMap.builder();
    new AbstractBreadthFirstTraversal<BuildRule>(inputs) {
      @Override
      public ImmutableSet<BuildRule> visit(BuildRule rule) {
        if (rule instanceof DLibrary) {
          libraries.put(rule.getBuildTarget(), (DLibrary) rule);
          return rule.getDeps();
        }
        return ImmutableSet.of();
      }
    }.start();
    return libraries.build();
  }

  /**
   * Ensures that a DCompileBuildRule exists for the given target, creating a DCompileBuildRule
   * if neccesary.
   *
   * @param compileTarget the target the rule should be for
   * @param baseParams build parameters for the rule
   * @param buildRuleResolver BuildRuleResolver the rule should be in
   * @param sourcePathResolver used to resolve source paths
   * @param ruleFinder used to locate the build rules behind source paths
   * @param dBuckConfig the Buck configuration for D
   * @param compilerFlags flags to pass to the compiler
   * @param name logical name of the compiled source
   * @param src the source file to be compiled
   * @param includes D include directories for this rule
   * @return the build rule
   */
  public static DCompileBuildRule requireBuildRule(
      BuildTarget compileTarget,
      BuildRuleParams baseParams,
      BuildRuleResolver buildRuleResolver,
      SourcePathResolver sourcePathResolver,
      SourcePathRuleFinder ruleFinder,
      DBuckConfig dBuckConfig,
      ImmutableList<String> compilerFlags,
      String name,
      SourcePath src,
      DIncludes includes)
      throws NoSuchBuildTargetException {
    // "require" semantics: reuse the rule if it was already created.
    Optional<BuildRule> existingRule = buildRuleResolver.getRuleOptional(compileTarget);
    if (existingRule.isPresent()) {
      return (DCompileBuildRule) existingRule.get();
    } else {
      Tool compiler = dBuckConfig.getDCompiler();

      // Own includes first, then includes exported by every transitive DLibrary.
      Map<BuildTarget, DIncludes> transitiveIncludes = new TreeMap<>();
      transitiveIncludes.put(baseParams.getBuildTarget(), includes);
      for (Map.Entry<BuildTarget, DLibrary> library :
          getTransitiveDLibraryRules(baseParams.getDeps()).entrySet()) {
        transitiveIncludes.put(library.getKey(), library.getValue().getIncludes());
      }

      // Deps: compiler toolchain, the source itself, and every include tree.
      ImmutableSortedSet.Builder<BuildRule> depsBuilder = ImmutableSortedSet.naturalOrder();
      depsBuilder.addAll(compiler.getDeps(ruleFinder));
      depsBuilder.addAll(ruleFinder.filterBuildRuleInputs(src));
      for (DIncludes dIncludes : transitiveIncludes.values()) {
        depsBuilder.addAll(dIncludes.getDeps(ruleFinder));
      }
      ImmutableSortedSet<BuildRule> deps = depsBuilder.build();

      return buildRuleResolver.addToIndex(
          new DCompileBuildRule(
              baseParams.copyWithChanges(
                  compileTarget,
                  Suppliers.ofInstance(deps),
                  Suppliers.ofInstance(ImmutableSortedSet.of())),
              sourcePathResolver,
              compiler,
              // Config-level base flags come before per-rule flags.
              ImmutableList.<String>builder()
                  .addAll(dBuckConfig.getBaseCompilerFlags())
                  .addAll(compilerFlags)
                  .build(),
              name,
              ImmutableSortedSet.of(src),
              ImmutableList.copyOf(transitiveIncludes.values())));
    }
  }

  /**
   * Generates BuildTargets and BuildRules to compile D sources to object files, and
   * returns a list of SourcePaths referring to the generated object files.
   *
   * @param baseParams build parameters for the compilation
   * @param buildRuleResolver resolver for build rules
   * @param sourcePathResolver resolver for source paths
   * @param ruleFinder used to locate the build rules behind source paths
   * @param cxxPlatform the C++ platform to compile for
   * @param dBuckConfig the Buck configuration for D
   * @param compilerFlags flags to pass to the compiler
   * @param sources source files to compile
   * @param includes D include directories for the compile rules
   * @return SourcePaths of the generated object files
   */
  public static ImmutableList<SourcePath> sourcePathsForCompiledSources(
      BuildRuleParams baseParams,
      BuildRuleResolver buildRuleResolver,
      SourcePathResolver sourcePathResolver,
      SourcePathRuleFinder ruleFinder,
      CxxPlatform cxxPlatform,
      DBuckConfig dBuckConfig,
      ImmutableList<String> compilerFlags,
      SourceList sources,
      DIncludes includes)
      throws NoSuchBuildTargetException {
    ImmutableList.Builder<SourcePath> sourcePaths = ImmutableList.builder();
    for (Map.Entry<String, SourcePath> source :
        sources.toNameMap(baseParams.getBuildTarget(), sourcePathResolver, "srcs").entrySet()) {
      BuildTarget compileTarget =
          createDCompileBuildTarget(
              baseParams.getBuildTarget(),
              source.getKey(),
              cxxPlatform);
      // Side effect: registers the compile rule in buildRuleResolver.
      requireBuildRule(
          compileTarget,
          baseParams,
          buildRuleResolver,
          sourcePathResolver,
          ruleFinder,
          dBuckConfig,
          compilerFlags,
          source.getKey(),
          source.getValue(),
          includes);
      sourcePaths.add(new BuildTargetSourcePath(compileTarget));
    }
    return sourcePaths.build();
  }

}
/*
 * The MIT License
 *
 * Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package hudson;

import com.gargoylesoftware.htmlunit.html.HtmlForm;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
import hudson.PluginManager.UberClassLoader;
import hudson.model.Hudson;
import hudson.model.UpdateCenter;
import hudson.model.UpdateCenter.UpdateCenterJob;
import hudson.model.UpdateSite;
import hudson.model.User;
import hudson.security.ACL;
import hudson.security.ACLContext;
import hudson.util.FormValidation;
import hudson.util.PersistedList;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Future;
import jenkins.RestartRequiredException;
import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.io.FileUtils;
import org.apache.tools.ant.filters.StringInputStream;
import static org.junit.Assert.*;
import static org.junit.Assume.assumeFalse;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.jvnet.hudson.test.Issue;
import org.jvnet.hudson.test.JenkinsRule;
import org.jvnet.hudson.test.MockAuthorizationStrategy;
import org.jvnet.hudson.test.Url;
import org.jvnet.hudson.test.recipes.WithPlugin;
import org.jvnet.hudson.test.recipes.WithPluginManager;

/**
 * Functional tests for {@link PluginManager}: manual plugin upload, dynamic
 * (no-restart) loading, optional/mandatory dependency resolution, and the
 * uber classloader behavior.
 *
 * @author Kohsuke Kawaguchi
 */
public class PluginManagerTest {

    // Starts a fresh Jenkins instance per test.
    @Rule public JenkinsRule r = PluginManagerUtil.newJenkinsRule();
    @Rule public TemporaryFolder tmp = new TemporaryFolder();

    /**
     * Manual submission form.
     */
    @Test public void uploadJpi() throws Exception {
        HtmlPage page = r.createWebClient().goTo("pluginManager/advanced");
        HtmlForm f = page.getFormByName("uploadPlugin");
        File dir = tmp.newFolder();
        File plugin = new File(dir, "tasks.jpi");
        // Copy the test fixture to a real file so the form can reference it by path.
        FileUtils.copyURLToFile(getClass().getClassLoader().getResource("plugins/tasks.jpi"),plugin);
        f.getInputByName("name").setValueAttribute(plugin.getAbsolutePath());
        r.submit(f);

        assertTrue( new File(r.jenkins.getRootDir(),"plugins/tasks.jpi").exists() );
    }

    /**
     * Manual submission form.
     */
    @Test public void uploadHpi() throws Exception {
        HtmlPage page = r.createWebClient().goTo("pluginManager/advanced");
        HtmlForm f = page.getFormByName("uploadPlugin");
        File dir = tmp.newFolder();
        File plugin = new File(dir, "legacy.hpi");
        FileUtils.copyURLToFile(getClass().getClassLoader().getResource("plugins/legacy.hpi"),plugin);
        f.getInputByName("name").setValueAttribute(plugin.getAbsolutePath());
        r.submit(f);

        // uploaded legacy plugins get renamed to *.jpi
        assertTrue( new File(r.jenkins.getRootDir(),"plugins/legacy.jpi").exists() );
    }

    /**
     * Tests the effect of {@link WithPlugin}.
     */
    @WithPlugin("tasks.jpi")
    @Test public void withRecipeJpi() throws Exception {
        assertNotNull(r.jenkins.getPlugin("tasks"));
    }

    /**
     * Tests the effect of {@link WithPlugin}.
     */
    @WithPlugin("legacy.hpi")
    @Test public void withRecipeHpi() throws Exception {
        assertNotNull(r.jenkins.getPlugin("legacy"));
    }

    /**
     * Makes sure that plugins can see Maven2 plugin that's refactored out in 1.296.
     */
    @WithPlugin("tasks.jpi")
    @Test public void optionalMavenDependency() throws Exception {
        PluginWrapper.Dependency m2=null;
        PluginWrapper tasks = r.jenkins.getPluginManager().getPlugin("tasks");
        for( PluginWrapper.Dependency d : tasks.getOptionalDependencies() ) {
            if(d.shortName.equals("maven-plugin")) {
                // assertNull guards against the dependency appearing twice.
                assertNull(m2);
                m2 = d;
            }
        }
        assertNotNull(m2);

        // this actually doesn't really test what we need, though, because
        // I thought test harness is loading the maven classes by itself.
        // TODO: write a separate test that tests the optional dependency loading
        tasks.classLoader.loadClass(hudson.maven.agent.AbortException.class.getName());
    }

    /**
     * Verifies that by the time {@link Plugin#start()} is called, uber classloader is fully functioning.
     * This is necessary as plugin start method can engage in XStream loading activities, and they should
     * resolve all the classes in the system (for example, a plugin X can define an extension point
     * other plugins implement, so when X loads its config it better sees all the implementations defined elsewhere)
     */
    @WithPlugin("tasks.jpi")
    @WithPluginManager(PluginManagerImpl_for_testUberClassLoaderIsAvailableDuringStart.class)
    @Test public void uberClassLoaderIsAvailableDuringStart() {
        assertTrue(((PluginManagerImpl_for_testUberClassLoaderIsAvailableDuringStart) r.jenkins.pluginManager).tested);
    }

    // Custom PluginManager whose strategy asserts uber-classloader visibility
    // from inside startPlugin(); `tested` records that the hook actually ran.
    public static class PluginManagerImpl_for_testUberClassLoaderIsAvailableDuringStart extends LocalPluginManager {
        boolean tested;

        public PluginManagerImpl_for_testUberClassLoaderIsAvailableDuringStart(File rootDir) {
            super(rootDir);
        }

        @Override
        protected PluginStrategy createPluginStrategy() {
            return new ClassicPluginStrategy(this) {
                @Override
                public void startPlugin(PluginWrapper plugin) throws Exception {
                    tested = true;

                    // plugins should be already visible in the UberClassLoader
                    assertTrue(!activePlugins.isEmpty());

                    uberClassLoader.loadClass("hudson.plugins.tasks.Messages");

                    super.startPlugin(plugin);
                }
            };
        }
    }

    /**
     * Makes sure that thread context classloader isn't used by {@link UberClassLoader}, or else
     * infinite cycle ensues.
     */
    @Url("http://jenkins.361315.n4.nabble.com/channel-example-and-plugin-classes-gives-ClassNotFoundException-td3756092.html")
    @Test public void uberClassLoaderDoesntUseContextClassLoader() throws Exception {
        Thread t = Thread.currentThread();

        // ucl delegates to the uber classloader; installing it as the context
        // classloader would create a cycle if UberClassLoader consulted it.
        URLClassLoader ucl = new URLClassLoader(new URL[0], r.jenkins.pluginManager.uberClassLoader);

        ClassLoader old = t.getContextClassLoader();
        t.setContextClassLoader(ucl);
        try {
            try {
                ucl.loadClass("No such class");
                fail();
            } catch (ClassNotFoundException e) {
                // as expected
            }

            ucl.loadClass(Hudson.class.getName());
        } finally {
            t.setContextClassLoader(old);
        }
    }

    @Test public void installWithoutRestart() throws Exception {
        URL res = getClass().getClassLoader().getResource("plugins/htmlpublisher.jpi");
        File f = new File(r.jenkins.getRootDir(), "plugins/htmlpublisher.jpi");
        FileUtils.copyURLToFile(res, f);
        r.jenkins.pluginManager.dynamicLoad(f);

        // The plugin's classes and descriptors must be visible without a restart.
        Class c = r.jenkins.getPluginManager().uberClassLoader.loadClass("htmlpublisher.HtmlPublisher$DescriptorImpl");
        assertNotNull(r.jenkins.getDescriptorByType(c));
    }

    @Test public void prevalidateConfig() throws Exception {
        assumeFalse("TODO: Implement this test on Windows", Functions.isWindows());
        // Point the update center at a local fixture so no network is needed.
        PersistedList<UpdateSite> sites = r.jenkins.getUpdateCenter().getSites();
        sites.clear();
        URL url = PluginManagerTest.class.getResource("/plugins/tasks-update-center.json");
        UpdateSite site = new UpdateSite(UpdateCenter.ID_DEFAULT, url.toString());
        sites.add(site);
        assertEquals(FormValidation.ok(), site.updateDirectly(false).get());
        assertNotNull(site.getData());

        // "runant" is not a plugin reference, so prevalidation finds nothing to install.
        assertEquals(Collections.emptyList(), r.jenkins.getPluginManager().prevalidateConfig(new StringInputStream("<whatever><runant plugin=\"ant@1.1\"/></whatever>")));
        assertNull(r.jenkins.getPluginManager().getPlugin("tasks"));
        List<Future<UpdateCenterJob>> jobs = r.jenkins.getPluginManager().prevalidateConfig(new StringInputStream("<whatever><tasks plugin=\"tasks@2.23\"/></whatever>"));
        assertEquals(1, jobs.size());
        UpdateCenterJob job = jobs.get(0).get(); // blocks for completion
        assertEquals("InstallationJob", job.getType());
        UpdateCenter.InstallationJob ijob = (UpdateCenter.InstallationJob) job;
        assertEquals("tasks", ijob.plugin.name);
        assertNotNull(r.jenkins.getPluginManager().getPlugin("tasks"));
        // TODO restart scheduled (SuccessButRequiresRestart) after upgrade or Support-Dynamic-Loading: false
        // TODO dependencies installed or upgraded too
        // TODO required plugin installed but inactive
    }

    // plugin "depender" optionally depends on plugin "dependee".
    // they are written like this:
    // org.jenkinsci.plugins.dependencytest.dependee:
    //
    //   public class Dependee {
    //     public static String getValue() {
    //       return "dependee";
    //     }
    //   }
    //
    //   public abstract class DependeeExtensionPoint implements ExtensionPoint {
    //   }
    //
    // org.jenkinsci.plugins.dependencytest.depender:
    //
    //   public class Depender {
    //     public static String getValue() {
    //       if (Jenkins.getInstance().getPlugin("dependee") != null) {
    //         return Dependee.getValue();
    //       }
    //       return "depender";
    //     }
    //   }
    //
    //   @Extension(optional=true)
    //   public class DependerExtension extends DependeeExtensionPoint {
    //   }

    /**
     * call org.jenkinsci.plugins.dependencytest.depender.Depender.getValue().
     *
     * @return
     * @throws Exception
     */
    private String callDependerValue() throws Exception {
        // Reflection is required: the class only exists inside the loaded plugin.
        Class<?> c = r.jenkins.getPluginManager().uberClassLoader.loadClass("org.jenkinsci.plugins.dependencytest.depender.Depender");
        Method m = c.getMethod("getValue");
        return (String)m.invoke(null);
    }

    /**
     * Load "dependee" and then load "depender".
     * Asserts that "depender" can access to "dependee".
     *
     * @throws Exception
     */
    @Test public void installDependingPluginWithoutRestart() throws Exception {
        // Load dependee.
        {
            dynamicLoad("dependee.hpi");
        }

        // before load depender, of course failed to call Depender.getValue()
        try {
            callDependerValue();
            fail();
        } catch (ClassNotFoundException _) {
        }

        // No extensions exist.
        assertTrue(r.jenkins.getExtensionList("org.jenkinsci.plugins.dependencytest.dependee.DependeeExtensionPoint").isEmpty());

        // Load depender.
        {
            dynamicLoad("depender.hpi");
        }

        // depender successfully accesses to dependee.
        assertEquals("dependee", callDependerValue());

        // Extension in depender is loaded.
        assertFalse(r.jenkins.getExtensionList("org.jenkinsci.plugins.dependencytest.dependee.DependeeExtensionPoint").isEmpty());
    }

    /**
     * Load "depender" and then load "dependee".
     * Asserts that "depender" can access to "dependee".
     *
     * @throws Exception
     */
    @Issue("JENKINS-19976")
    @Test public void installDependedPluginWithoutRestart() throws Exception {
        // Load depender.
        {
            dynamicLoad("depender.hpi");
        }

        // before load dependee, depender does not access to dependee.
        assertEquals("depender", callDependerValue());

        // before load dependee, of course failed to list extensions for dependee.
        try {
            r.jenkins.getExtensionList("org.jenkinsci.plugins.dependencytest.dependee.DependeeExtensionPoint");
            fail();
        } catch( ClassNotFoundException _ ){
        }

        // Load dependee.
        {
            dynamicLoad("dependee.hpi");
        }

        // (MUST) Not throws an exception
        // (SHOULD) depender successfully accesses to dependee.
        assertEquals("dependee", callDependerValue());

        // No extensions exist.
        // extensions in depender is not loaded.
        assertTrue(r.jenkins.getExtensionList("org.jenkinsci.plugins.dependencytest.dependee.DependeeExtensionPoint").isEmpty());
    }

    @Issue("JENKINS-21486")
    @Test public void installPluginWithObsoleteDependencyFails() throws Exception {
        // Load dependee 0.0.1.
        {
            dynamicLoad("dependee.hpi");
        }

        // Load mandatory-depender 0.0.2, depending on dependee 0.0.2
        try {
            dynamicLoad("mandatory-depender-0.0.2.hpi");
            fail("Should not have worked");
        } catch (IOException e) {
            // Expected
        }
    }

    @Issue("JENKINS-21486")
    @Test public void installPluginWithDisabledOptionalDependencySucceeds() throws Exception {
        // Load dependee 0.0.2.
        {
            dynamicLoadAndDisable("dependee-0.0.2.hpi");
        }

        // Load depender 0.0.2, depending optionally on dependee 0.0.2
        {
            dynamicLoad("depender-0.0.2.hpi");
        }

        // dependee is not loaded so we cannot list any extension for it.
        try {
            r.jenkins.getExtensionList("org.jenkinsci.plugins.dependencytest.dependee.DependeeExtensionPoint");
            fail();
        } catch( ClassNotFoundException _ ){
        }
    }

    @Issue("JENKINS-21486")
    @Test public void installPluginWithDisabledDependencyFails() throws Exception {
        // Load dependee 0.0.2.
        {
            dynamicLoadAndDisable("dependee-0.0.2.hpi");
        }

        // Load mandatory-depender 0.0.2, depending on dependee 0.0.2
        try {
            dynamicLoad("mandatory-depender-0.0.2.hpi");
            fail("Should not have worked");
        } catch (IOException e) {
            // Expected
        }
    }

    @Issue("JENKINS-21486")
    @Test public void installPluginWithObsoleteOptionalDependencyFails() throws Exception {
        // Load dependee 0.0.1.
        {
            dynamicLoad("dependee.hpi");
        }

        // Load depender 0.0.2, depending optionally on dependee 0.0.2
        try {
            dynamicLoad("depender-0.0.2.hpi");
            fail("Should not have worked");
        } catch (IOException e) {
            // Expected
        }
    }

    @Issue("JENKINS-12753")
    @WithPlugin("tasks.jpi")
    @Test public void dynamicLoadRestartRequiredException() throws Exception {
        File jpi = new File(r.jenkins.getRootDir(), "plugins/tasks.jpi");
        assertTrue(jpi.isFile());
        // Touch the archive so it looks newer than the already-loaded plugin.
        FileUtils.touch(jpi);
        File timestamp = new File(r.jenkins.getRootDir(), "plugins/tasks/.timestamp2");
        assertTrue(timestamp.isFile());
        long lastMod = timestamp.lastModified();
        try {
            r.jenkins.getPluginManager().dynamicLoad(jpi);
            fail("should not have worked");
        } catch (RestartRequiredException x) {
            // good
        }
        assertEquals("should not have tried to delete & unpack", lastMod, timestamp.lastModified());
    }

    @WithPlugin("tasks.jpi")
    @Test public void pluginListJSONApi() throws IOException {
        JSONObject response = r.getJSON("pluginManager/plugins").getJSONObject();

        // Check that the basic API endpoint invocation works.
        assertEquals("ok", response.getString("status"));
        JSONArray data = response.getJSONArray("data");
        assertTrue(data.size() > 0);

        // Check that there was some data in the response and that the first entry
        // at least had some of the expected fields.
        JSONObject pluginInfo = data.getJSONObject(0);
        assertTrue(pluginInfo.getString("name") != null);
        assertTrue(pluginInfo.getString("title") != null);
        assertTrue(pluginInfo.getString("dependencies") != null);
    }

    @Issue("JENKINS-41684")
    @Test
    public void requireSystemDuringLoad() throws Exception {
        r.jenkins.setSecurityRealm(r.createDummySecurityRealm());
        r.jenkins.setAuthorizationStrategy(new MockAuthorizationStrategy());
        // Loading must succeed even while an unprivileged user is impersonated.
        try (ACLContext context = ACL.as(User.get("underprivileged").impersonate())) {
            dynamicLoad("require-system-during-load.hpi");
        }
    }

    private void dynamicLoad(String plugin) throws IOException, InterruptedException, RestartRequiredException {
        PluginManagerUtil.dynamicLoad(plugin, r.jenkins);
    }

    private void dynamicLoadAndDisable(String plugin) throws IOException, InterruptedException, RestartRequiredException {
        PluginManagerUtil.dynamicLoad(plugin, r.jenkins, true);
    }

    @Test public void uploadDependencyResolution() throws Exception {
        assumeFalse("TODO: Implement this test for Windows", Functions.isWindows());
        PersistedList<UpdateSite> sites = r.jenkins.getUpdateCenter().getSites();
        sites.clear();
        URL url = PluginManagerTest.class.getResource("/plugins/upload-test-update-center.json");
        UpdateSite site = new UpdateSite(UpdateCenter.ID_DEFAULT, url.toString());
        sites.add(site);

        assertEquals(FormValidation.ok(), site.updateDirectly(false).get());
        assertNotNull(site.getData());

        // neither of the following plugins should be installed
        assertNull(r.jenkins.getPluginManager().getPlugin("Parameterized-Remote-Trigger"));
        assertNull(r.jenkins.getPluginManager().getPlugin("token-macro"));

        HtmlPage page = r.createWebClient().goTo("pluginManager/advanced");
        HtmlForm f = page.getFormByName("uploadPlugin");
        File dir = tmp.newFolder();
        File plugin = new File(dir, "Parameterized-Remote-Trigger.hpi");
        FileUtils.copyURLToFile(getClass().getClassLoader().getResource("plugins/Parameterized-Remote-Trigger.hpi"),plugin);
        f.getInputByName("name").setValueAttribute(plugin.getAbsolutePath());
        r.submit(f);

        assertTrue(r.jenkins.getUpdateCenter().getJobs().size() > 0);

        // wait for all the download jobs to complete
        boolean done = true;
        boolean passed = true;
        do {
            Thread.sleep(100);
            done = true;
            for(UpdateCenterJob job : r.jenkins.getUpdateCenter().getJobs()) {
                if(job instanceof UpdateCenter.DownloadJob) {
                    UpdateCenter.DownloadJob j = (UpdateCenter.DownloadJob)job;
                    assertFalse(j.status instanceof UpdateCenter.DownloadJob.Failure);
                    // Still pending or installing => keep polling.
                    done &= !(((j.status instanceof UpdateCenter.DownloadJob.Pending) ||
                        (j.status instanceof UpdateCenter.DownloadJob.Installing)));
                }
            }
        } while(!done);

        // the files get renamed to .jpi
        assertTrue( new File(r.jenkins.getRootDir(),"plugins/Parameterized-Remote-Trigger.jpi").exists() );
        assertTrue( new File(r.jenkins.getRootDir(),"plugins/token-macro.jpi").exists() );

        // now the other plugins should have been found as dependencies and downloaded
        assertNotNull(r.jenkins.getPluginManager().getPlugin("Parameterized-Remote-Trigger"));
        assertNotNull(r.jenkins.getPluginManager().getPlugin("token-macro"));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.end2end;

import static org.apache.phoenix.util.TestUtil.A_VALUE;
import static org.apache.phoenix.util.TestUtil.ROW1;
import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;

import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.util.PropertiesUtil;
import org.junit.Test;

/**
 * End-to-end tests for UPSERT SELECT behavior with auto-commit enabled.
 *
 * <p>Fix over the previous revision: all {@link Connection}s are now managed
 * with try-with-resources. Previously {@code testAutoCommitUpsertSelect} and
 * {@code testDynamicUpsertSelect} never closed their connections, and the
 * remaining tests closed them only on the success path, leaking connections
 * when an assertion failed.
 */
public class UpsertSelectAutoCommitIT extends ParallelStatsDisabledIT {

    public UpsertSelectAutoCommitIT() {
    }

    /**
     * Upserts a row with auto-commit on, verifies it is immediately visible,
     * then copies it via UPSERT SELECT into a second table whose PK has a
     * DESC-ordered column and verifies the copy.
     */
    @Test
    public void testAutoCommitUpsertSelect() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            conn.setAutoCommit(true);
            String atable = generateUniqueName();
            conn.createStatement().execute("CREATE TABLE " + atable
                + " (ORGANIZATION_ID CHAR(15) NOT NULL, ENTITY_ID CHAR(15) NOT NULL, A_STRING VARCHAR\n"
                + "CONSTRAINT pk PRIMARY KEY (organization_id, entity_id))");

            String tenantId = getOrganizationId();
            // Insert all rows at ts
            PreparedStatement stmt = conn.prepareStatement(
                    "upsert into " + atable +
                    "(" +
                    "    ORGANIZATION_ID, " +
                    "    ENTITY_ID, " +
                    "    A_STRING " +
                    "    )" +
                    "VALUES (?, ?, ?)");
            stmt.setString(1, tenantId);
            stmt.setString(2, ROW1);
            stmt.setString(3, A_VALUE);
            stmt.execute();

            // Auto-commit means the row must be visible without an explicit commit.
            String query = "SELECT entity_id, a_string FROM " + atable;
            PreparedStatement statement = conn.prepareStatement(query);
            ResultSet rs = statement.executeQuery();

            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            assertEquals(A_VALUE, rs.getString(2));
            assertFalse(rs.next());

            // Copy into a table with a DESC-ordered PK column.
            String atable2 = generateUniqueName();
            conn.createStatement().execute("CREATE TABLE " + atable2
                + " (ORGANIZATION_ID CHAR(15) NOT NULL, ENTITY_ID CHAR(15) NOT NULL, A_STRING VARCHAR\n"
                + "CONSTRAINT pk PRIMARY KEY (organization_id, entity_id DESC))");

            conn.createStatement().execute("UPSERT INTO " + atable2 + " SELECT * FROM " + atable);
            query = "SELECT entity_id, a_string FROM " + atable2;
            statement = conn.prepareStatement(query);
            rs = statement.executeQuery();

            assertTrue(rs.next());
            assertEquals(ROW1, rs.getString(1));
            assertEquals(A_VALUE, rs.getString(2));
            assertFalse(rs.next());
        }
    }

    /**
     * Verifies UPSERT SELECT into a table using dynamic columns
     * ({@code PLINY_ID CHAR(15), CREATED_DATE DATE} declared inline in the
     * column list) sourced from a statically-defined table.
     */
    @Test
    public void testDynamicUpsertSelect() throws Exception {
        try (Connection conn = DriverManager.getConnection(getUrl())) {
            String tableName = generateUniqueName();
            // Salted target table; the dynamic columns are appended at upsert time.
            String cursorDDL = " CREATE TABLE IF NOT EXISTS " + tableName
                + " (ORGANIZATION_ID VARCHAR(15) NOT NULL, \n"
                + "QUERY_ID VARCHAR(15) NOT NULL, \n"
                + "CURSOR_ORDER UNSIGNED_LONG NOT NULL, \n"
                + "CONSTRAINT API_HBASE_CURSOR_STORAGE_PK PRIMARY KEY (ORGANIZATION_ID, QUERY_ID, CURSOR_ORDER))\n"
                + "SALT_BUCKETS = 4";
            conn.createStatement().execute(cursorDDL);

            String tableName2 = generateUniqueName();
            String dataTableDDL = "CREATE TABLE IF NOT EXISTS " + tableName2 + "("
                + "ORGANIZATION_ID CHAR(15) NOT NULL, "
                + "PLINY_ID CHAR(15) NOT NULL, "
                + "CREATED_DATE DATE NOT NULL, "
                + "TEXT VARCHAR, "
                + "CONSTRAINT PK PRIMARY KEY "
                + "("
                + "ORGANIZATION_ID, "
                + "PLINY_ID, "
                + "CREATED_DATE"
                + ")"
                + ")";
            conn.createStatement().execute(dataTableDDL);

            // Seed one source row.
            String upsert = "UPSERT INTO " + tableName2 + " VALUES (?, ?, ?, ?)";
            PreparedStatement stmt = conn.prepareStatement(upsert);
            stmt.setString(1, getOrganizationId());
            stmt.setString(2, "aaaaaaaaaaaaaaa");
            stmt.setDate(3, new Date(System.currentTimeMillis()));
            stmt.setString(4, "text");
            stmt.executeUpdate();
            conn.commit();

            // PLINY_ID / CREATED_DATE are dynamic columns on the target table.
            String upsertSelect = "UPSERT INTO " + tableName
                + " (ORGANIZATION_ID, QUERY_ID, CURSOR_ORDER, PLINY_ID CHAR(15),CREATED_DATE DATE) "
                + "SELECT ?, ?, ?, PLINY_ID, CREATED_DATE FROM " + tableName2
                + " WHERE ORGANIZATION_ID = ?";
            stmt = conn.prepareStatement(upsertSelect);
            String orgId = getOrganizationId();
            stmt.setString(1, orgId);
            stmt.setString(2, "queryqueryquery");
            stmt.setInt(3, 1);
            stmt.setString(4, orgId);
            stmt.executeUpdate();
            conn.commit();
        }
    }

    /**
     * Repeatedly doubles a table via {@code UPSERT INTO t SELECT ... FROM t}
     * and asserts each round sees only the pre-upsert rows — i.e. the scan
     * must not observe rows written by the statement itself. Small batch and
     * scan-chunk sizes force multiple round trips to expose the bug.
     */
    @Test
    public void testUpsertSelectDoesntSeeUpsertedData() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
        props.setProperty(QueryServices.MUTATE_BATCH_SIZE_BYTES_ATTRIB, Integer.toString(512));
        props.setProperty(QueryServices.SCAN_CACHE_SIZE_ATTRIB, Integer.toString(3));
        props.setProperty(QueryServices.SCAN_RESULT_CHUNK_SIZE, Integer.toString(3));
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            conn.setAutoCommit(true);
            conn.createStatement().execute("CREATE SEQUENCE keys");
            String tableName = generateUniqueName();
            conn.createStatement().execute(
                "CREATE TABLE " + tableName + " (pk INTEGER PRIMARY KEY, val INTEGER)");

            conn.createStatement().execute(
                "UPSERT INTO " + tableName + " VALUES (NEXT VALUE FOR keys,1)");
            for (int i = 0; i < 6; i++) {
                Statement stmt = conn.createStatement();
                int upsertCount = stmt.executeUpdate(
                    "UPSERT INTO " + tableName + " SELECT NEXT VALUE FOR keys, val FROM "
                        + tableName);
                // Row count doubles each round: 1, 2, 4, ... — would be wrong
                // if the SELECT saw its own writes.
                assertEquals((int) Math.pow(2, i), upsertCount);
            }
        }
    }

    /**
     * Verifies that UPSERT SELECT statements stay under the configured
     * max-mutation-size limits (3 rows / 50000 bytes) when auto-commit
     * flushes each batch as it goes.
     */
    @Test
    public void testMaxMutationSize() throws Exception {
        Properties connectionProperties = new Properties();
        connectionProperties.setProperty(QueryServices.MAX_MUTATION_SIZE_ATTRIB, "3");
        connectionProperties.setProperty(QueryServices.MAX_MUTATION_SIZE_BYTES_ATTRIB, "50000");
        try (PhoenixConnection connection =
                (PhoenixConnection) DriverManager.getConnection(getUrl(), connectionProperties)) {
            connection.setAutoCommit(true);
            String fullTableName = generateUniqueName();
            try (Statement stmt = connection.createStatement()) {
                stmt.execute(
                    "CREATE TABLE " + fullTableName
                        + " (pk INTEGER PRIMARY KEY, v1 INTEGER, v2 INTEGER)");
                stmt.execute("CREATE SEQUENCE " + fullTableName + "_seq cache 1000");
                stmt.execute("UPSERT INTO " + fullTableName + " VALUES (NEXT VALUE FOR "
                    + fullTableName + "_seq, rand(), rand())");
            }
            try (Statement stmt = connection.createStatement()) {
                // Each round doubles the table; with auto-commit the mutations
                // are flushed in batches that must respect the size limits.
                for (int i = 0; i < 16; i++) {
                    stmt.execute("UPSERT INTO " + fullTableName + " SELECT NEXT VALUE FOR "
                        + fullTableName + "_seq, rand(), rand() FROM " + fullTableName);
                }
            }
        }
    }
}
package eu.qualimaster.TimeTravelPip.topology; import java.util.*; import java.io.IOException; import java.lang.NoSuchMethodException; import java.lang.reflect.InvocationTargetException; import java.io.OutputStream; import java.io.Serializable; import java.net.Socket; import java.io.PrintWriter; import eu.qualimaster.common.logging.DataLogger; import java.net.UnknownHostException; import com.esotericsoftware.kryo.io.Output; import org.apache.log4j.Logger; import backtype.storm.tuple.*; import backtype.storm.task.*; import backtype.storm.topology.*; import eu.qualimaster.events.EventManager; import eu.qualimaster.monitoring.events.AlgorithmChangedMonitoringEvent; import eu.qualimaster.monitoring.events.ParameterChangedMonitoringEvent; import eu.qualimaster.families.inf.*; import eu.qualimaster.families.inf.IFDynamicGraphCompilation.*; import eu.qualimaster.families.imp.*; import eu.qualimaster.common.signal.*; import eu.qualimaster.base.algorithm.*; import eu.qualimaster.base.algorithm.IFamily.State; import eu.qualimaster.infrastructure.PipelineOptions; import eu.qualimaster.pipeline.DefaultModeException; import eu.qualimaster.pipeline.DefaultModeMonitoringEvent; import eu.qualimaster.base.serializer.KryoSwitchTupleSerializer; import backtype.storm.Config; import eu.qualimaster.base.pipeline.CollectingTopologyInfo; import eu.qualimaster.pipeline.AlgorithmChangeParameter; import eu.qualimaster.dynamicgraph.DynamicGraphCompilation; import eu.qualimaster.families.inf.IFCorrelationFinancial.*; import eu.qualimaster.families.inf.IFHayashiYoshida.*; import eu.qualimaster.families.inf.IFHayashiYoshida.*; import eu.qualimaster.base.algorithm.IItemEmitter; /** * Defines the FamilyElment in the pipeline(GEN). 
**/ @SuppressWarnings({ "rawtypes", "serial" }) public class PipelineVar_10_FamilyElement2FamilyElement extends BaseSignalBolt { final static Logger logger = Logger.getLogger(PipelineVar_10_FamilyElement2FamilyElement.class); transient OutputCollector _collector; private boolean algChange = false; private boolean firstTuple = false; private long record = 0; private int taskId; private transient FDynamicGraphCompilation.IFDynamicGraphCompilationEdgeStreamOutput edgeStreamResult = new FDynamicGraphCompilation.IFDynamicGraphCompilationEdgeStreamOutput(); private transient IFDynamicGraphCompilation alg = null; transient IIFCorrelationFinancialPairwiseFinancialOutput iTuplePairwiseFinancial = null; transient IIFDynamicGraphCompilationPairwiseFinancialInput inputPairwiseFinancial = null; public PipelineVar_10_FamilyElement2FamilyElement(String name, String namespace) { super(name, namespace, true); } /** * Sends an algorithm change event and considers whether the coordination layer shall be bypassed for direct * testing. * @param algorithm the new algorithm * @param causeMsgId the message id of the causing message (may be empty or null) */ private void sendAlgorithmChangeEvent(String algorithm, String causeMsgId) { EventManager.send(new AlgorithmChangedMonitoringEvent(getPipeline(), getName(), algorithm, causeMsgId)); } /** * Sends an parameter change event and considers whether the coordination layer shall be bypassed for direct * testing. * @param parameter the parameter to be changed * @param value the new value * @param causeMsgId the message id of the causing message (may be empty or null) */ private void sendParameterChangeEvent(String parameter, Serializable value, String causeMsgId) { EventManager.send(new ParameterChangedMonitoringEvent(getPipeline(), getName(), parameter, value, causeMsgId)); } /** * Sends an a default mode monitoring event with a DefaultModeException case. 
* @param exceptionCase the DefaultModeException case */ private void sendDefaultModeMonitoringEvent(DefaultModeException exceptionCase) { EventManager.send(new DefaultModeMonitoringEvent(getPipeline(), getName(), exceptionCase)); } public void prepare(Map map, TopologyContext topologyContext, OutputCollector collector) { super.prepare(map, topologyContext, collector); _collector = collector; taskId = topologyContext.getThisTaskId(); algChange = false; edgeStreamResult = new FDynamicGraphCompilation.IFDynamicGraphCompilationEdgeStreamOutput(); IItemEmitter<IIFDynamicGraphCompilationEdgeStreamOutput> edgeStreamEmitter = new IItemEmitter<IIFDynamicGraphCompilationEdgeStreamOutput>() { @Override public void emitDirect(String streamId, IIFDynamicGraphCompilationEdgeStreamOutput item) { _collector.emit("PipelineVar_10_FamilyElement2EdgeStream", new Values(item)); } }; edgeStreamResult.setEmitter(edgeStreamEmitter); if("STATIC".equals(map.get(Constants.CONFIG_KEY_INIT_MODE))) { try { try { Class cls = Class.forName("eu.qualimaster.dynamicgraph.DynamicGraphCompilation"); alg = (IFDynamicGraphCompilation) cls.newInstance(); } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } if (PipelineOptions.hasExecutorArgument(map, getName(), "correlationThreshold")) { alg.setParameterCorrelationThreshold(PipelineOptions.getExecutorDoubleArgument(map, getName(), "correlationThreshold", 0.0)); } if (alg != null) { alg.switchState(State.ACTIVATE); //activate the current algorithm } sendAlgorithmChangeEvent("DynamicGraphCompilation", null); algChange = true; } catch (Throwable e) { if (e instanceof DefaultModeException) { logger.error("Throwable caught - turning to default mode"); e.printStackTrace(); sendDefaultModeMonitoringEvent((DefaultModeException) e); } } } initMonitor(); logger.info("The end of the prepare method."); } @Override protected boolean 
initMonitorDuringPrepare() { return false; } public void forwardTuple(Object tupleItem) { startMonitoring(); // delegate to family "fDynamicGraphCompilation" if (!firstTuple) {//the first tuple arrived firstTuple = true; record = System.currentTimeMillis(); } if(tupleItem instanceof IIFCorrelationFinancialPairwiseFinancialOutput) { iTuplePairwiseFinancial = (IIFCorrelationFinancialPairwiseFinancialOutput)tupleItem; inputPairwiseFinancial = new FDynamicGraphCompilation.IFDynamicGraphCompilationPairwiseFinancialInput(); inputPairwiseFinancial.setId0(iTuplePairwiseFinancial.getId0()); inputPairwiseFinancial.setId1(iTuplePairwiseFinancial.getId1()); inputPairwiseFinancial.setDate(iTuplePairwiseFinancial.getDate()); inputPairwiseFinancial.setValue(iTuplePairwiseFinancial.getValue()); try { if(alg != null) { alg.calculate(inputPairwiseFinancial, edgeStreamResult); } else { long now = System.currentTimeMillis(); if ((now - record) > 1000) {//print this error log once per second logger.error("Algorithm is not assigned!"); record = now; } } } catch(Throwable e) { long now = System.currentTimeMillis(); if ((now - record) > 1000) {//print this error log once per second e.printStackTrace(); record = now; } edgeStreamResult.setEdge(""); if (e instanceof DefaultModeException) { sendDefaultModeMonitoringEvent((DefaultModeException) e); } } } if(!(alg instanceof ITopologyCreate)) { eu.qualimaster.base.algorithm.IOutputItemIterator<IIFDynamicGraphCompilationEdgeStreamOutput> iterEdgeStream = edgeStreamResult.iterator(); iterEdgeStream.reset(); while (iterEdgeStream.hasNext()) { IIFDynamicGraphCompilationEdgeStreamOutput out = iterEdgeStream.next(); _collector.emit("PipelineVar_10_FamilyElement2EdgeStream", new Values(out)); } } // _collector.ack(tuple); endMonitoring(); } @Override public void execute(Tuple tuple) { forwardTuple(tuple.getValue(0)); } @Override public void notifyParameterChange(ParameterChangeSignal signal) { try { for(int i = 0; alg!=null && i < 
signal.getChangeCount(); i++) { ParameterChange para = signal.getChange(i); switch (para.getName()) { case "correlationThreshold" : try { System.out.println("Received parameter changing signal correlationThreshold"); alg.setParameterCorrelationThreshold(para.getDoubleValue()); sendParameterChangeEvent("correlationThreshold", para.getDoubleValue(), signal.getCauseMessageId()); } catch (ValueFormatException e) { e.printStackTrace(); } break; } } } catch (Throwable e) { if (e instanceof DefaultModeException) { logger.error("Throwable caught - turning to default mode"); e.printStackTrace(); sendDefaultModeMonitoringEvent((DefaultModeException) e); } } } @Override public void notifyAlgorithmChange(AlgorithmChangeSignal signal) { logger.info("Received algorithm switching signal " + signal.getAlgorithm()); try { switch (signal.getAlgorithm()) { case "DynamicGraphCompilation": if (!algChange || !(alg instanceof DynamicGraphCompilation)) { algChange = true; if(alg != null) { alg.switchState(State.PASSIVATE); //passivate the previous algorithm } try { Class cls = Class.forName("eu.qualimaster.dynamicgraph.DynamicGraphCompilation"); alg = (IFDynamicGraphCompilation) cls.newInstance(); } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (InstantiationException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } alg.setParameterCorrelationThreshold(0.0); sendAlgorithmChangeEvent("DynamicGraphCompilation", signal.getCauseMessageId()); if(alg != null) { alg.switchState(State.ACTIVATE); //activate the current algorithm } } break; } super.notifyAlgorithmChange(signal); } catch (Throwable e) { if (e instanceof DefaultModeException) { logger.error("Throwable caught - turning to default mode"); e.printStackTrace(); sendDefaultModeMonitoringEvent((DefaultModeException) e); } } } @Override public void cleanup() { super.cleanup(); if(alg != null) { alg.switchState(State.TERMINATING); } } public void declareOutputFields(OutputFieldsDeclarer 
declarer) { declarer.declareStream("PipelineVar_10_FamilyElement2EdgeStream", new Fields("PipelineVar_10_FamilyElement2EdgeStreamFields")); } @Override protected void prepareShutdown(ShutdownSignal signal) { super.prepareShutdown(signal); if(alg != null) { alg.switchState(State.TERMINATING); } } }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/videointelligence/v1p2beta1/video_intelligence.proto package com.google.cloud.videointelligence.v1p2beta1; public final class VideoIntelligenceServiceProto { private VideoIntelligenceServiceProto() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_descriptor; static final 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_Entity_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_Entity_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_fieldAccessorTable; static final 
com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor 
internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\nAgoogle/cloud/videointelligence/v1p2bet" + "a1/video_intelligence.proto\022(google.clou" + "d.videointelligence.v1p2beta1\032\034google/ap" + "i/annotations.proto\032#google/longrunning/" + "operations.proto\032\036google/protobuf/durati" + "on.proto\032\037google/protobuf/timestamp.prot" + "o\032\027google/rpc/status.proto\"\375\001\n\024AnnotateV" + "ideoRequest\022\021\n\tinput_uri\030\001 \001(\t\022\025\n\rinput_" + "content\030\006 \001(\014\022C\n\010features\030\002 \003(\01621.google" + ".cloud.videointelligence.v1p2beta1.Featu" + "re\022M\n\rvideo_context\030\003 \001(\01326.google.cloud" + ".videointelligence.v1p2beta1.VideoContex" + "t\022\022\n\noutput_uri\030\004 \001(\t\022\023\n\013location_id\030\005 \001" + "(\t\"\366\003\n\014VideoContext\022H\n\010segments\030\001 \003(\01326." 
+ "google.cloud.videointelligence.v1p2beta1" + ".VideoSegment\022^\n\026label_detection_config\030" + "\002 \001(\0132>.google.cloud.videointelligence.v" + "1p2beta1.LabelDetectionConfig\022i\n\034shot_ch" + "ange_detection_config\030\003 \001(\0132C.google.clo" + "ud.videointelligence.v1p2beta1.ShotChang" + "eDetectionConfig\022s\n!explicit_content_det" + "ection_config\030\004 \001(\0132H.google.cloud.video" + "intelligence.v1p2beta1.ExplicitContentDe" + "tectionConfig\022\\\n\025text_detection_config\030\010" + " \001(\0132=.google.cloud.videointelligence.v1" + "p2beta1.TextDetectionConfig\"\234\001\n\024LabelDet" + "ectionConfig\022Z\n\024label_detection_mode\030\001 \001" + "(\0162<.google.cloud.videointelligence.v1p2" + "beta1.LabelDetectionMode\022\031\n\021stationary_c" + "amera\030\002 \001(\010\022\r\n\005model\030\003 \001(\t\"*\n\031ShotChange" + "DetectionConfig\022\r\n\005model\030\001 \001(\t\"/\n\036Explic" + "itContentDetectionConfig\022\r\n\005model\030\001 \001(\t\"" + "-\n\023TextDetectionConfig\022\026\n\016language_hints" + "\030\001 \003(\t\"x\n\014VideoSegment\0224\n\021start_time_off" + "set\030\001 \001(\0132\031.google.protobuf.Duration\0222\n\017" + "end_time_offset\030\002 \001(\0132\031.google.protobuf." 
+ "Duration\"k\n\014LabelSegment\022G\n\007segment\030\001 \001(" + "\01326.google.cloud.videointelligence.v1p2b" + "eta1.VideoSegment\022\022\n\nconfidence\030\002 \001(\002\"P\n" + "\nLabelFrame\022.\n\013time_offset\030\001 \001(\0132\031.googl" + "e.protobuf.Duration\022\022\n\nconfidence\030\002 \001(\002\"" + "G\n\006Entity\022\021\n\tentity_id\030\001 \001(\t\022\023\n\013descript" + "ion\030\002 \001(\t\022\025\n\rlanguage_code\030\003 \001(\t\"\260\002\n\017Lab" + "elAnnotation\022@\n\006entity\030\001 \001(\01320.google.cl" + "oud.videointelligence.v1p2beta1.Entity\022K" + "\n\021category_entities\030\002 \003(\01320.google.cloud" + ".videointelligence.v1p2beta1.Entity\022H\n\010s" + "egments\030\003 \003(\01326.google.cloud.videointell" + "igence.v1p2beta1.LabelSegment\022D\n\006frames\030" + "\004 \003(\01324.google.cloud.videointelligence.v" + "1p2beta1.LabelFrame\"\234\001\n\024ExplicitContentF" + "rame\022.\n\013time_offset\030\001 \001(\0132\031.google.proto" + "buf.Duration\022T\n\026pornography_likelihood\030\002" + " \001(\01624.google.cloud.videointelligence.v1" + "p2beta1.Likelihood\"k\n\031ExplicitContentAnn" + "otation\022N\n\006frames\030\001 \003(\0132>.google.cloud.v" + "ideointelligence.v1p2beta1.ExplicitConte" + "ntFrame\"Q\n\025NormalizedBoundingBox\022\014\n\004left" + "\030\001 \001(\002\022\013\n\003top\030\002 \001(\002\022\r\n\005right\030\003 \001(\002\022\016\n\006bo" + "ttom\030\004 \001(\002\"\313\005\n\026VideoAnnotationResults\022\021\n" + "\tinput_uri\030\001 \001(\t\022\\\n\031segment_label_annota" + "tions\030\002 \003(\01329.google.cloud.videointellig" + "ence.v1p2beta1.LabelAnnotation\022Y\n\026shot_l" + "abel_annotations\030\003 \003(\01329.google.cloud.vi" + "deointelligence.v1p2beta1.LabelAnnotatio" + "n\022Z\n\027frame_label_annotations\030\004 \003(\01329.goo" + "gle.cloud.videointelligence.v1p2beta1.La" + "belAnnotation\022P\n\020shot_annotations\030\006 \003(\0132" + "6.google.cloud.videointelligence.v1p2bet" + 
"a1.VideoSegment\022`\n\023explicit_annotation\030\007" + " \001(\0132C.google.cloud.videointelligence.v1" + "p2beta1.ExplicitContentAnnotation\022R\n\020tex" + "t_annotations\030\014 \003(\01328.google.cloud.video" + "intelligence.v1p2beta1.TextAnnotation\022^\n" + "\022object_annotations\030\016 \003(\0132B.google.cloud" + ".videointelligence.v1p2beta1.ObjectTrack" + "ingAnnotation\022!\n\005error\030\t \001(\0132\022.google.rp" + "c.Status\"u\n\025AnnotateVideoResponse\022\\\n\022ann" + "otation_results\030\001 \003(\0132@.google.cloud.vid" + "eointelligence.v1p2beta1.VideoAnnotation" + "Results\"\247\001\n\027VideoAnnotationProgress\022\021\n\ti" + "nput_uri\030\001 \001(\t\022\030\n\020progress_percent\030\002 \001(\005" + "\022.\n\nstart_time\030\003 \001(\0132\032.google.protobuf.T" + "imestamp\022/\n\013update_time\030\004 \001(\0132\032.google.p" + "rotobuf.Timestamp\"w\n\025AnnotateVideoProgre" + "ss\022^\n\023annotation_progress\030\001 \003(\0132A.google" + ".cloud.videointelligence.v1p2beta1.Video" + "AnnotationProgress\"(\n\020NormalizedVertex\022\t" + "\n\001x\030\001 \001(\002\022\t\n\001y\030\002 \001(\002\"f\n\026NormalizedBoundi" + "ngPoly\022L\n\010vertices\030\001 \003(\0132:.google.cloud." + "videointelligence.v1p2beta1.NormalizedVe" + "rtex\"\257\001\n\013TextSegment\022G\n\007segment\030\001 \001(\01326." + "google.cloud.videointelligence.v1p2beta1" + ".VideoSegment\022\022\n\nconfidence\030\002 \001(\002\022C\n\006fra" + "mes\030\003 \003(\01323.google.cloud.videointelligen" + "ce.v1p2beta1.TextFrame\"\233\001\n\tTextFrame\022^\n\024" + "rotated_bounding_box\030\001 \001(\0132@.google.clou" + "d.videointelligence.v1p2beta1.Normalized" + "BoundingPoly\022.\n\013time_offset\030\002 \001(\0132\031.goog" + "le.protobuf.Duration\"g\n\016TextAnnotation\022\014" + "\n\004text\030\001 \001(\t\022G\n\010segments\030\002 \003(\01325.google." 
+ "cloud.videointelligence.v1p2beta1.TextSe" + "gment\"\247\001\n\023ObjectTrackingFrame\022`\n\027normali" + "zed_bounding_box\030\001 \001(\0132?.google.cloud.vi" + "deointelligence.v1p2beta1.NormalizedBoun" + "dingBox\022.\n\013time_offset\030\002 \001(\0132\031.google.pr" + "otobuf.Duration\"\210\002\n\030ObjectTrackingAnnota" + "tion\022@\n\006entity\030\001 \001(\01320.google.cloud.vide" + "ointelligence.v1p2beta1.Entity\022\022\n\nconfid" + "ence\030\004 \001(\002\022M\n\006frames\030\002 \003(\0132=.google.clou" + "d.videointelligence.v1p2beta1.ObjectTrac" + "kingFrame\022G\n\007segment\030\003 \001(\01326.google.clou" + "d.videointelligence.v1p2beta1.VideoSegme" + "nt*\233\001\n\007Feature\022\027\n\023FEATURE_UNSPECIFIED\020\000\022" + "\023\n\017LABEL_DETECTION\020\001\022\031\n\025SHOT_CHANGE_DETE" + "CTION\020\002\022\036\n\032EXPLICIT_CONTENT_DETECTION\020\003\022" + "\022\n\016TEXT_DETECTION\020\007\022\023\n\017OBJECT_TRACKING\020\t" + "*r\n\022LabelDetectionMode\022$\n LABEL_DETECTIO" + "N_MODE_UNSPECIFIED\020\000\022\r\n\tSHOT_MODE\020\001\022\016\n\nF" + "RAME_MODE\020\002\022\027\n\023SHOT_AND_FRAME_MODE\020\003*t\n\n" + "Likelihood\022\032\n\026LIKELIHOOD_UNSPECIFIED\020\000\022\021" + "\n\rVERY_UNLIKELY\020\001\022\014\n\010UNLIKELY\020\002\022\014\n\010POSSI" + "BLE\020\003\022\n\n\006LIKELY\020\004\022\017\n\013VERY_LIKELY\020\0052\262\001\n\030V" + "ideoIntelligenceService\022\225\001\n\rAnnotateVide" + "o\022>.google.cloud.videointelligence.v1p2b" + "eta1.AnnotateVideoRequest\032\035.google.longr" + "unning.Operation\"%\202\323\344\223\002\037\"\032/v1p2beta1/vid" + "eos:annotate:\001*B\200\002\n,com.google.cloud.vid" + "eointelligence.v1p2beta1B\035VideoIntellige" + "nceServiceProtoP\001ZYgoogle.golang.org/gen" + "proto/googleapis/cloud/videointelligence" + "/v1p2beta1;videointelligence\252\002(Google.Cl" + "oud.VideoIntelligence.V1P2Beta1\312\002(Google" + "\\Cloud\\VideoIntelligence\\V1p2beta1b\006prot" + "o3" }; 
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { public com.google.protobuf.ExtensionRegistry assignDescriptors( com.google.protobuf.Descriptors.FileDescriptor root) { descriptor = root; return null; } }; com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), com.google.longrunning.OperationsProto.getDescriptor(), com.google.protobuf.DurationProto.getDescriptor(), com.google.protobuf.TimestampProto.getDescriptor(), com.google.rpc.StatusProto.getDescriptor(), }, assigner); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoRequest_descriptor, new java.lang.String[] { "InputUri", "InputContent", "Features", "VideoContext", "OutputUri", "LocationId", }); internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_VideoContext_descriptor, new java.lang.String[] { "Segments", "LabelDetectionConfig", "ShotChangeDetectionConfig", "ExplicitContentDetectionConfig", "TextDetectionConfig", }); internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_LabelDetectionConfig_descriptor, new java.lang.String[] { "LabelDetectionMode", "StationaryCamera", "Model", }); internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ShotChangeDetectionConfig_descriptor, new java.lang.String[] { "Model", }); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentDetectionConfig_descriptor, new java.lang.String[] { "Model", }); internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_TextDetectionConfig_descriptor, new java.lang.String[] { "LanguageHints", }); internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_VideoSegment_descriptor, new java.lang.String[] { "StartTimeOffset", "EndTimeOffset", }); 
internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_LabelSegment_descriptor, new java.lang.String[] { "Segment", "Confidence", }); internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_LabelFrame_descriptor, new java.lang.String[] { "TimeOffset", "Confidence", }); internal_static_google_cloud_videointelligence_v1p2beta1_Entity_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_videointelligence_v1p2beta1_Entity_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_Entity_descriptor, new java.lang.String[] { "EntityId", "Description", "LanguageCode", }); internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_LabelAnnotation_descriptor, new java.lang.String[] { "Entity", "CategoryEntities", "Segments", "Frames", }); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentFrame_descriptor, new java.lang.String[] { "TimeOffset", "PornographyLikelihood", }); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_descriptor = getDescriptor().getMessageTypes().get(12); internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ExplicitContentAnnotation_descriptor, new java.lang.String[] { "Frames", }); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingBox_descriptor, new java.lang.String[] { "Left", "Top", "Right", "Bottom", }); internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationResults_descriptor, new java.lang.String[] { "InputUri", "SegmentLabelAnnotations", "ShotLabelAnnotations", "FrameLabelAnnotations", "ShotAnnotations", "ExplicitAnnotation", "TextAnnotations", "ObjectAnnotations", "Error", }); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_descriptor = getDescriptor().getMessageTypes().get(15); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoResponse_descriptor, new java.lang.String[] { "AnnotationResults", }); internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_descriptor = getDescriptor().getMessageTypes().get(16); internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_VideoAnnotationProgress_descriptor, new java.lang.String[] { "InputUri", "ProgressPercent", "StartTime", "UpdateTime", }); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_descriptor = getDescriptor().getMessageTypes().get(17); internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_AnnotateVideoProgress_descriptor, new java.lang.String[] { "AnnotationProgress", }); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_descriptor = getDescriptor().getMessageTypes().get(18); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedVertex_descriptor, new java.lang.String[] { "X", "Y", }); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_descriptor = getDescriptor().getMessageTypes().get(19); internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_NormalizedBoundingPoly_descriptor, new java.lang.String[] { "Vertices", }); internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_descriptor = 
getDescriptor().getMessageTypes().get(20); internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_TextSegment_descriptor, new java.lang.String[] { "Segment", "Confidence", "Frames", }); internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_descriptor = getDescriptor().getMessageTypes().get(21); internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_TextFrame_descriptor, new java.lang.String[] { "RotatedBoundingBox", "TimeOffset", }); internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_descriptor = getDescriptor().getMessageTypes().get(22); internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_descriptor, new java.lang.String[] { "Text", "Segments", }); internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_descriptor = getDescriptor().getMessageTypes().get(23); internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingFrame_descriptor, new java.lang.String[] { "NormalizedBoundingBox", "TimeOffset", }); internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_descriptor = getDescriptor().getMessageTypes().get(24); internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_cloud_videointelligence_v1p2beta1_ObjectTrackingAnnotation_descriptor, new java.lang.String[] { "Entity", "Confidence", "Frames", "Segment", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.AnnotationsProto.http); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.AnnotationsProto.getDescriptor(); com.google.longrunning.OperationsProto.getDescriptor(); com.google.protobuf.DurationProto.getDescriptor(); com.google.protobuf.TimestampProto.getDescriptor(); com.google.rpc.StatusProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
/* * JBoss, Home of Professional Open Source * Copyright 2013, Red Hat, Inc., and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.as.quickstarts.kitchensink.html5.mobile.demo.pages; import static org.jboss.arquillian.graphene.Graphene.element; import java.text.MessageFormat; import java.util.concurrent.TimeUnit; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.jboss.arquillian.graphene.Graphene; import org.jboss.arquillian.graphene.enricher.findby.ByJQuery; import org.jboss.arquillian.graphene.enricher.findby.FindBy; import org.jboss.as.quickstarts.kitchensink.html5.mobile.demo.dto.NameValuePair; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.ui.ExpectedCondition; /** * Represents the registration page. * * @author <a href="mailto:aemmanou@redhat.com">Tolis Emmanouilidis</a> * */ public class RegistrationPage extends KitchensinkDemoPage { /** * The id attribute's value for the name field. */ private static final String nameFieldId = "name"; /** * The id attribute's value for the email field. */ private static final String emailFieldId = "email"; /** * The id attribute's value for the phone number field. 
*/ private static final String phoneNumberFieldId = "phoneNumber"; /** * The id attribute's value for the cancel field. */ private static final String cancelFieldId = "cancel"; /** * The id attribute's value for the submit field. */ private static final String submitFieldId = "register"; /** * The class value for the success message. */ private static final String successMessageClassName = "success"; /** * Error message which appears in registration when the email is already used. */ private static final String emailTakenErrorMessage = "Email taken"; /** * Error message which appears in registration when the name is empty or invalid. */ private static final String invalidNameErrorMessage = "1-25 letters and spaces"; /** * Error message which appears in registration when the email is empty. */ private static final String emptyEmailErrorMessage = "may not be empty"; /** * Error message which appears in registration when the email is invalid. */ private static final String invalidEmailErrorMessage = "Invalid format"; /** * Error message which appears in registration when the phone number is empty. */ private static final String emptyPhoneErrorMessage = "Not valid"; /** * Error message which appears in registration when the phone number is empty or invalid. */ private static final String invalidPhoneErrorMessage = "10-12 Numbers"; /** * Locator for the form. */ @FindBy(id = "reg") private WebElement form; /** * Locator for the form messages. */ @FindBy(id = "formMsgs") private WebElement formMsg; /** * Pattern to locate the parent div for each input field. */ private static final String inputFieldParentPattern = "div:has(input[id=\"{0}\"])"; /** * Invalid message class name. */ private static final String invalidMsgClassName = "invalid"; /** * Waits until the car registration page is loaded. 
*/ @Override public void waitUntilPageIsLoaded() { super.waitUntilPageIsLoaded(); // wait until the form is visible waitUntilFormIsVisible(); } /** * Waits until the form is visible. */ public void waitUntilFormIsVisible() { Graphene.waitModel().withTimeout(20, TimeUnit.SECONDS).until(element(form).isVisible()); } /** * Fills the form dynamically. * * @param nameValuePairs The {@link NameValuePair} array which contains the field name and the field value. */ public void fillForm(NameValuePair[] nameValuePairs) { if (!ArrayUtils.isEmpty(nameValuePairs)) { for (NameValuePair p : nameValuePairs) { if (p != null) { final WebElement field = form.findElement(By.id(p.getName())); clearField(field); fillField(field, p.getValue()); } } } } /** * Submits the form by pressing the submit button. * * @param id The submit's button id. */ public void submitFormByButton(String id) { final WebElement submitButton = form.findElement(By.id(id)); Graphene.waitModel().withTimeout(10, TimeUnit.SECONDS).until(element(submitButton).isVisible()); submitButton.click(); } /** * Cancels the form submission. * * @param id The cancel's button id. */ public void cancel(String id) { final WebElement cancelButton = form.findElement(By.id(id)); Graphene.waitModel().withTimeout(10, TimeUnit.SECONDS).until(element(cancelButton).isVisible()); cancelButton.click(); } /** * Finds a WebElement given the id. * * @param id The element id. * @return a {@link WebElement} */ public WebElement getWebElementById(String id) { return form.findElement(By.id(id)); } /** * Fills the form and submits it. * * @param nameValuePairs The {@link NameValuePair} array which contains the field name and the field value. * @param submitButtonId the submit's button id. */ public void fillFormAndSubmit(NameValuePair[] nameValuePairs, String submitButtonId) { fillForm(nameValuePairs); submitFormByButton(submitButtonId); } /** * Clears a field. * * @param field The {@link WebElement} to be cleared. 
*/ public void clearField(WebElement field) { field.clear(); } /** * Fills a field with a value. * * @param field The {@link WebElement} to be filled. * @param keys The value. */ public void fillField(WebElement field, String keys) { field.sendKeys(keys); } /** * Waits until the success message is visible. * * @param className The success message span className. */ public void waitUntilSuccessMessageIsVisible(String className) { Graphene.waitModel().withTimeout(20, TimeUnit.SECONDS).until(element(formMsg).isVisible()); final WebElement successMsg = formMsg.findElement(By.className(className)); Graphene.waitModel().withTimeout(10, TimeUnit.SECONDS).until(element(successMsg).isVisible()); } /** * Finds the invalid message which corresponds to an input field. * * @param inputId The input field id. * @return The error message or empty String. */ public String getInvalidMessage(String inputId) { final WebElement divContainer = form.findElement(ByJQuery.jquerySelector(MessageFormat.format(inputFieldParentPattern, inputId))); final WebElement invalidMessage = divContainer.findElement(By.className(invalidMsgClassName)); Graphene.waitModel().withTimeout(10, TimeUnit.SECONDS).until(new ExpectedCondition<Boolean>() { @Override public Boolean apply(WebDriver notUsed) { return invalidMessage != null && !StringUtils.isEmpty(invalidMessage.getText()); } }); return (invalidMessage != null) ? invalidMessage.getText() : ""; } /** * Performs a member registration. * * @param name The member's name. * @param email The member's email. * @param phoneNumber The member's phone number. */ public void registerMember(String name, String email, String phoneNumber) { final NameValuePair[] nameValuePairs = { new NameValuePair(nameFieldId, name), new NameValuePair(emailFieldId, email), new NameValuePair(phoneNumberFieldId, phoneNumber) }; fillFormAndSubmit(nameValuePairs, submitFieldId); } /** * Waits for the success message to appear. 
*/ public void waitUntilSuccessMessageIsVisible() { waitUntilSuccessMessageIsVisible(successMessageClassName); } /** * Cancels the registration procedure. */ public void cancelRegistration() { cancel(cancelFieldId); } /** * Gets the email invalid message. * * @return email invalid message */ public String getEmailInvalidMessage() { return getInvalidMessage(emailFieldId); } /** * Gets the name invalid message. * * @return name invalid message */ public String getNameInvalidMessage() { return getInvalidMessage(nameFieldId); } /** * Gets the phone invalid message. * * @return phone invalid message */ public String getPhoneInvalidMessage() { return getInvalidMessage(phoneNumberFieldId); } /** * Gets the email taken error message. * * @return email taken error message */ public String getEmailTakenErrorMessage() { return emailTakenErrorMessage; } /** * Gets the invalid name error message. * * @return invalid name error message */ public String getInvalidNameErrorMessage() { return invalidNameErrorMessage; } /** * Gets the empty email error message. * * @return empty email error message */ public String getEmptyEmailErrorMessage() { return emptyEmailErrorMessage; } /** * Gets the invalid email error message. * * @return invalid email error message */ public String getInvalidEmailErrorMessage() { return invalidEmailErrorMessage; } /** * Gets the empty phone error message. * * @return empty phone error message */ public String getEmptyPhoneErrorMessage() { return emptyPhoneErrorMessage; } /** * Gets the invalid phone error message. * * @return invalid phone error message */ public String getInvalidPhoneErrorMessage() { return invalidPhoneErrorMessage; } }
// @formatter:off /** * Copyright 2016 Bernard Ladenthin bernard.ladenthin@gmail.com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ // @formatter:on package net.ladenthin.jcputhrottle; import com.sun.jna.platform.win32.Guid; import com.sun.jna.platform.win32.Kernel32; import com.sun.jna.platform.win32.WinDef; import com.sun.jna.ptr.PointerByReference; import static com.sun.jna.platform.win32.WinError.ERROR_SUCCESS; /** * @author Bernard Ladenthin bernard.ladenthin@gmail.com */ public class PowrProfJNA { private final PowrProf powrProf; // ################################################################################ // Settings belonging to no subgroup public final static Guid.GUID SUB_NONE = new Guid.GUID("fea3413e-7e05-4911-9a71-700331f1c294"); // is alias // Require a password on wakeup public final static Guid.GUID CONSOLELOCK = new Guid.GUID("0e796bdb-100d-47d6-a2d5-f7d2daa51f51"); // is alias // ################################################################################ // Hard disk public final static Guid.GUID SUB_DISK = new Guid.GUID("0012ee47-9041-4b5d-9b77-535fba8b1442"); // is alias // Turn off hard disk after public final static Guid.GUID DISKIDLE = new Guid.GUID("6738e2c4-e8a5-4a42-b16a-e040e769756e"); // is alias // ################################################################################ // Internet Explorer public final static Guid.GUID SUB_INTERNETEXPLORER = new Guid.GUID("02f815b5-a5cf-4c84-bf20-649d1f75d3d8"); // no alias // JavaScript Timer 
Frequency public final static Guid.GUID IEJavaScriptTimerFrequency = new Guid.GUID("4c793e7d-a264-42e1-87d3-7a0d2f523ccd"); // no alias // ################################################################################ // Desktop background settings public final static Guid.GUID SUB_DESKTOPBACKGROUNDSETTINGS = new Guid.GUID("0d7dbae2-4294-402a-ba8e-26777e8488cd"); // no alias // Slide show public final static Guid.GUID SLIDESHOW = new Guid.GUID("309dce9b-bef4-4119-9921-a851fb12f0f4"); // no alias // ################################################################################ // Wireless Adapter Settings public final static Guid.GUID SUB_WIRELESSADAPTERSETTINGS = new Guid.GUID("19cbb8fa-5279-450e-9fac-8a3d5fedd0c1"); // no alias // Power Saving Mode public final static Guid.GUID WIRELESSADAPTERPOWERSAVINGMODE = new Guid.GUID("12bbebe6-58d6-4636-95bb-3217ef867c1a"); // no alias // ################################################################################ // Sleep public final static Guid.GUID SUB_SLEEP = new Guid.GUID("238C9FA8-0AAD-41ED-83F4-97BE242C8F20"); // is alias // Sleep after public final static Guid.GUID STANDBYIDLE = new Guid.GUID("29f6c1db-86da-48c5-9fdb-f2b67b1f44da"); // is alias // Allow hybrid sleep public final static Guid.GUID HYBRIDSLEEP = new Guid.GUID("94ac6d29-73ce-41a6-809f-6363ba21b47e"); // is alias // Hibernate after public final static Guid.GUID HIBERNATEIDLE = new Guid.GUID("9d7815a6-7ee4-497e-8888-515a05f02364"); // is alias // Allow wake timers public final static Guid.GUID RTCWAKE = new Guid.GUID("bd3b718a-0680-4d9d-8ab2-e1d2b4ac806d"); // is alias // ################################################################################ // USB settings public final static Guid.GUID SUB_USBSETTINGS = new Guid.GUID("2a737441-1930-4402-8d77-b2bebba308a3"); // no alias // USB selective suspend setting public final static Guid.GUID USB_Selective_Suspend = new Guid.GUID("48e6b7a6-50f5-4782-a5d4-53bb8f07e226"); // no alias // 
################################################################################ // Idle Resiliency public final static Guid.GUID SUB_IR = new Guid.GUID("2e601130-5351-4d9d-8e04-252966bad054"); // is alias // ################################################################################ // Interrupt Steering Settings public final static Guid.GUID SUB_INTSTEER = new Guid.GUID("48672f38-7a9a-4bb2-8bf8-3d85be19de4e"); // is alias // ################################################################################ // Power buttons and lid public final static Guid.GUID SUB_BUTTONS = new Guid.GUID("4f971e89-eebd-4455-a8de-9e59040e7347"); // is alias // Lid close action public final static Guid.GUID LIDACTION = new Guid.GUID("5ca83367-6e45-459f-a27b-476b1d01c936"); // is alias // Power button action public final static Guid.GUID PBUTTONACTION = new Guid.GUID("7648efa3-dd9c-4e3e-b566-50f929386280"); // is alias // Sleep button action public final static Guid.GUID SBUTTONACTION = new Guid.GUID("96996bc0-ad50-47ec-923b-6f41874dd9eb"); // is alias // Start menu power button public final static Guid.GUID UIBUTTON_ACTION = new Guid.GUID("a7066653-8d6c-40a8-910e-a1f54b84c7e5"); // is alias // ################################################################################ // PCI Express public final static Guid.GUID SUB_PCIEXPRESS = new Guid.GUID("501a4d13-42af-4429-9fd1-a8218c268e20"); // is alias // Link State Power Management public final static Guid.GUID ASPM = new Guid.GUID("ee12f906-d277-404b-b6da-e5fa1a576df5"); // is alias // ################################################################################ // Processor power management public final static Guid.GUID SUB_PROCESSOR = new Guid.GUID("54533251-82be-4824-96c1-47b60b740d00"); // is alias // Minimum processor state public final static Guid.GUID PROCTHROTTLEMIN = new Guid.GUID("893dee8e-2bef-41e0-89c6-b55d0929964c"); // is alias // System cooling policy public final static Guid.GUID SYSCOOLPOL = new 
Guid.GUID("94d3a615-a899-4ac5-ae2b-e4d8f634367f"); // is alias // Maximum processor state public final static Guid.GUID PROCTHROTTLEMAX = new Guid.GUID("bc5038f7-23e0-4960-96da-33abaf5935ec"); // is alias // ################################################################################ // Display public final static Guid.GUID SUB_VIDEO = new Guid.GUID("7516b95f-f776-4464-8c53-06167f40cc99"); // is alias // Dim display after public final static Guid.GUID VIDEODIM = new Guid.GUID("17aaa29b-8b43-4b94-aafe-35f64daaf1ee"); // is alias // Turn off display after public final static Guid.GUID VIDEOIDLE = new Guid.GUID("3c0bc021-c8a8-4e07-a973-6b14cbcb2b7e"); // is alias // User annoyance timeout public final static Guid.GUID VIDEOANNOY = new Guid.GUID("82dbcf2d-cd67-40c5-bfdc-9f1a5ccd4663"); // is alias // Console lock display off timeout public final static Guid.GUID VIDEOCONLOCK = new Guid.GUID("8ec4b3a5-6868-48c2-be75-4f3044be88a7"); // is alias // Adaptive display public final static Guid.GUID VIDEOADAPT = new Guid.GUID("90959d22-d6a1-49b9-af93-bce885ad335b"); // is alias // Allow display required policy public final static Guid.GUID ALLOWDISPLAY = new Guid.GUID("a9ceb8da-cd46-44fb-a98b-02af69de4623"); // is alias // Display brightness public final static Guid.GUID VIDEOBRIGHTNESS = new Guid.GUID("aded5e82-b909-4619-9949-f5d71dac0bcb"); // no alias // Increase adaptive timeout by public final static Guid.GUID VIDEOADAPTINC = new Guid.GUID("eed904df-b142-4183-b10b-5a1197a37864"); // is alias // Dimmed display brightness public final static Guid.GUID VIDEOBRIGHTNESS_DIM = new Guid.GUID("f1fbfde2-a960-4165-9f88-50667911ce96"); // no alias // Enable adaptive brightness public final static Guid.GUID ADAPTBRIGHT = new Guid.GUID("fbd9aa66-9553-4097-ba44-ed6e9d65eab8"); // is alias // ################################################################################ // Presence Aware Power Behavior public final static Guid.GUID SUB_PRESENCE = new 
Guid.GUID("8619b916-e004-4dd8-9b66-dae86f806698"); // is alias // ################################################################################ // Multimedia settings public final static Guid.GUID SUB_MULTIMEDIASETTINGS = new Guid.GUID("9596fb26-9850-41fd-ac3e-f7c3c00afd4b"); // no alias // When sharing media public final static Guid.GUID WHENSHARINGMEDIA = new Guid.GUID("03680956-93bc-4294-bba6-4e0f09bb717f"); // no alias // When playing video public final static Guid.GUID WHENPLAYINGVIDEO = new Guid.GUID("34c7b99f-9a6d-4b3c-8dc7-b6693b78cef4"); // no alias // ################################################################################ // Battery public final static Guid.GUID SUB_BATTERY = new Guid.GUID("e73a048d-bf27-4f12-9731-8b2076e8891f"); // is alias // Critical battery action public final static Guid.GUID BATACTIONCRIT = new Guid.GUID("637ea02f-bbcb-4015-8e2c-a1c7b9c0b546"); // is alias // Low battery level public final static Guid.GUID BATLEVELLOW = new Guid.GUID("8183ba9a-e910-48da-8769-14ae6dc1170a"); // is alias // Critical battery level public final static Guid.GUID BATLEVELCRIT = new Guid.GUID("9a66d8d7-4ff7-4ef9-b5a2-5a326ca2a469"); // is alias // Low battery notification public final static Guid.GUID BATFLAGSLOW = new Guid.GUID("bcded951-187b-4d05-bccc-f7e51960c258"); // is alias // Low battery action public final static Guid.GUID BATACTIONLOW = new Guid.GUID("d8742dcb-3e6a-4b3c-b3fe-374623cdcf06"); // is alias // Reserve battery level public final static Guid.GUID BATRESERVELEVEL = new Guid.GUID("f3c5027d-cd16-4930-aa6b-90db844a8f00"); // no alias // ################################################################################ // unknown subgroup public final static Guid.GUID UNKNOWN_0000 = new Guid.GUID("29e6fab8-ce22-4a98-9d8b-75fe10526ac7"); // no alias // unknown subgroup public final static Guid.GUID UNKNOWN_0001 = new Guid.GUID("e276e160-7cb0-43c6-b20b-73f5dce39954"); // no alias // unknown subgroup public final static Guid.GUID 
UNKNOWN_0002 = new Guid.GUID("2adaa5b8-1289-467b-a809-b95c40d27b4c"); // no alias PowrProfJNA(PowrProf powrProf) { this.powrProf = powrProf; } public Guid.GUID jna_PowerGetActiveScheme() { PointerByReference pPowerScheme = new PointerByReference(); powrProf.PowerGetActiveScheme(null, pPowerScheme.getPointer()); Guid.GUID guid = new Guid.GUID(pPowerScheme.getValue()); if (pPowerScheme.getPointer() != null) { Kernel32.INSTANCE.LocalFree(pPowerScheme.getPointer()); } return guid; } public int jna_PowerReadACValueIndex(Guid.GUID schemeGuid, Guid.GUID subGroupOfPowerSettingsGuid, Guid.GUID powerSettingGuid) { WinDef.DWORDByReference acValueIndex = new WinDef.DWORDByReference(); WinDef.DWORD retval = powrProf.PowerReadACValueIndex(null, schemeGuid.getPointer(), subGroupOfPowerSettingsGuid.getPointer(), powerSettingGuid.getPointer(), acValueIndex); if (retval.intValue() != ERROR_SUCCESS) { throw new RuntimeException("retval.intValue() != ERROR_SUCCESS: " + retval.intValue()); } return acValueIndex.getValue().intValue(); } /** * same as {@link #jna_PowerReadACValueIndex(Guid.GUID, Guid.GUID, Guid.GUID)} but call PowerReadDCValueIndex */ public int jna_PowerReadDCValueIndex(Guid.GUID schemeGuid, Guid.GUID subGroupOfPowerSettingsGuid, Guid.GUID powerSettingGuid) { WinDef.DWORDByReference acValueIndex = new WinDef.DWORDByReference(); WinDef.DWORD retval = powrProf.PowerReadDCValueIndex(null, schemeGuid.getPointer(), subGroupOfPowerSettingsGuid.getPointer(), powerSettingGuid.getPointer(), acValueIndex); if (retval.intValue() != ERROR_SUCCESS) { throw new RuntimeException("retval.intValue() != ERROR_SUCCESS: " + retval.intValue()); } return acValueIndex.getValue().intValue(); } public void jna_PowerWriteACValueIndex(Guid.GUID schemeGuid, Guid.GUID subGroupOfPowerSettingsGuid, Guid.GUID powerSettingGuid, WinDef.DWORD acValueIndex) { WinDef.DWORD retval = powrProf.PowerWriteACValueIndex(null, schemeGuid.getPointer(), subGroupOfPowerSettingsGuid.getPointer(), 
powerSettingGuid.getPointer(), acValueIndex); if (retval.intValue() != ERROR_SUCCESS) { throw new RuntimeException("retval.intValue() != ERROR_SUCCESS: " + retval.intValue()); } } /** * same as {@link #jna_PowerWriteACValueIndex(Guid.GUID, Guid.GUID, Guid.GUID, WinDef.DWORD)} but call PowerWriteDCValueIndex */ public void jna_PowerWriteDCValueIndex(Guid.GUID schemeGuid, Guid.GUID subGroupOfPowerSettingsGuid, Guid.GUID powerSettingGuid, WinDef.DWORD acValueIndex) { WinDef.DWORD retval = powrProf.PowerWriteDCValueIndex(null, schemeGuid.getPointer(), subGroupOfPowerSettingsGuid.getPointer(), powerSettingGuid.getPointer(), acValueIndex); if (retval.intValue() != ERROR_SUCCESS) { throw new RuntimeException("retval.intValue() != ERROR_SUCCESS: " + retval.intValue()); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.fo; // Java import org.xml.sax.Attributes; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.xmlgraphics.util.QName; import org.apache.fop.apps.FopFactory; import org.apache.fop.fo.expr.PropertyException; import org.apache.fop.fo.properties.CommonAbsolutePosition; import org.apache.fop.fo.properties.CommonAccessibility; import org.apache.fop.fo.properties.CommonAural; import org.apache.fop.fo.properties.CommonBorderPaddingBackground; import org.apache.fop.fo.properties.CommonFont; import org.apache.fop.fo.properties.CommonHyphenation; import org.apache.fop.fo.properties.CommonMarginBlock; import org.apache.fop.fo.properties.CommonMarginInline; import org.apache.fop.fo.properties.CommonRelativePosition; import org.apache.fop.fo.properties.CommonTextDecoration; import org.apache.fop.fo.properties.Property; import org.apache.fop.fo.properties.PropertyMaker; /** * Class containing the collection of properties for a given FObj. 
*/ public abstract class PropertyList { private static boolean[] inheritableProperty; /** reference to the parent FO's propertyList **/ protected PropertyList parentPropertyList = null; private FObj fobj = null; private static Log log = LogFactory.getLog(PropertyList.class); /** * Basic constructor. * @param fObjToAttach the FO this PropertyList should be attached to * @param parentPropertyList the PropertyList belonging to the new objects * parent */ public PropertyList(FObj fObjToAttach, PropertyList parentPropertyList) { this.fobj = fObjToAttach; this.parentPropertyList = parentPropertyList; } /** * @return the FObj object to which this propertyList is attached */ public FObj getFObj() { return this.fobj; } /** * @return the FObj object attached to the parentPropertyList */ public FObj getParentFObj() { if (parentPropertyList != null) { return parentPropertyList.getFObj(); } else { return null; } } /** * @return the FObj object attached to the parentPropetyList */ public PropertyList getParentPropertyList() { return parentPropertyList; } /** * Return the value explicitly specified on this FO. * @param propId The id of the property whose value is desired. * @return The value if the property is explicitly set or set by * a shorthand property, otherwise null. * @throws PropertyException ... */ public Property getExplicitOrShorthand(int propId) throws PropertyException { /* Handle request for one part of a compound property */ Property p = getExplicit(propId); if (p == null) { p = getShorthand(propId); } return p; } /** * Return the value explicitly specified on this FO. * @param propId The ID of the property whose value is desired. * @return The value if the property is explicitly set, otherwise null. */ public abstract Property getExplicit(int propId); /** * Set an value defined explicitly on this FO. * @param propId The ID of the property to set. * @param value The value of the property. 
 */
    public abstract void putExplicit(int propId, Property value);

    /**
     * Return the value of this property inherited by this FO.
     * Implements the inherited-property-value function.
     * The property must be inheritable!
     * @param propId The ID of the property whose value is desired.
     * @return The inherited value, otherwise null.
     * @throws PropertyException ...
     */
    public Property getInherited(int propId) throws PropertyException {

        if (isInherited(propId)) {
            return getFromParent(propId);
        } else {
            // return the "initial" value
            return makeProperty(propId);
        }
    }

    /**
     * Return the property on the current FlowObject. If it isn't set explicitly,
     * this will try to compute it based on other properties, or if it is
     * inheritable, to return the inherited value. If all else fails, it returns
     * the default value.
     * @param propId The Constants ID of the property whose value is desired.
     * @return the Property corresponding to that name
     * @throws PropertyException if there is a problem evaluating the property
     */
    public Property get(int propId) throws PropertyException {
        return get(propId, true, true);
    }

    /**
     * Return the property on the current FlowObject. Depending on the passed flags,
     * this will try to compute it based on other properties, or if it is
     * inheritable, to return the inherited value. If all else fails, it returns
     * the default value.
     * @param propId the property's id
     * @param bTryInherit true for inherited properties, or when the inherited
     * value is needed
     * @param bTryDefault true when the default value may be used as a last resort
     * @return the property
     * @throws PropertyException if there is a problem evaluating the property
     */
    public Property get(int propId, boolean bTryInherit,
                        boolean bTryDefault) throws PropertyException {

        // the low bits select the base property's maker; the compound mask
        // keeps any sub-property component for the maker to resolve
        PropertyMaker propertyMaker = findMaker(propId & Constants.PROPERTY_MASK);
        if (propertyMaker != null) {
            return propertyMaker.get(propId & Constants.COMPOUND_MASK, this,
                                     bTryInherit, bTryDefault);
        }
        return null;
    }

    /**
     * Return the "nearest" specified value for the given property.
     * Implements the from-nearest-specified-value function.
     * @param propId The ID of the property whose value is desired.
     * @return The computed value if the property is explicitly set on some
     * ancestor of the current FO, else the initial value.
     * @throws PropertyException if an error occurred when getting the property
     */
    public Property getNearestSpecified(int propId) throws PropertyException {
        Property p = null;
        PropertyList pList = parentPropertyList;

        // walk up the ancestor chain until an explicit value is found
        while (pList != null) {
            p = pList.getExplicit(propId);
            if (p != null) {
                return p;
            } else {
                pList = pList.parentPropertyList;
            }
        }

        // If no explicit value found on any of the ancestor-nodes,
        // return initial (default) value.
        return makeProperty(propId);
    }

    /**
     * Return the value of this property on the parent of this FO.
     * Implements the from-parent function.
     * @param propId The Constants ID of the property whose value is desired.
     * @return The computed value on the parent or the initial value if this
     * FO is the root or is in a different namespace from its parent.
     * @throws PropertyException ...
 */
    public Property getFromParent(int propId) throws PropertyException {
        if (parentPropertyList != null) {
            return parentPropertyList.get(propId);
        } else {
            // no parent: fall back to the property's initial value
            return makeProperty(propId);
        }
    }

    /**
     * Select a writing mode dependent property ID based on value of writing mode property.
     * @param lrtb the property ID to return under lrtb writingmode.
     * @param rltb the property ID to return under rltb writingmode.
     * @param tbrl the property ID to return under tbrl writingmode.
     * @param tblr the property ID to return under tblr writingmode.
     * @return one of the property IDs, depending on the writing mode;
     * -1 if the writing mode is unknown or could not be evaluated.
     */
    public int selectFromWritingMode(int lrtb, int rltb, int tbrl, int tblr) {
        int propID;
        try {
            switch (get(Constants.PR_WRITING_MODE).getEnum()) {
            case Constants.EN_LR_TB:
                propID = lrtb;
                break;
            case Constants.EN_RL_TB:
                propID = rltb;
                break;
            case Constants.EN_TB_RL:
                propID = tbrl;
                break;
            case Constants.EN_TB_LR:
                propID = tblr;
                break;
            default:
                propID = -1;
                break;
            }
        } catch ( PropertyException e ) {
            // writing-mode could not be evaluated; signal "no property"
            propID = -1;
        }
        return propID;
    }

    /**
     * Converts the named attribute to a property if it is present in the
     * attribute collection.
     * @param attributes the attributes passed in by the parser
     * @param attributeName the name of the attribute to convert
     * @return the raw attribute value, or null if the attribute was absent
     * @throws ValidationException if the attribute maps to no property id
     */
    private String addAttributeToList(Attributes attributes,
                                      String attributeName) throws ValidationException {
        String attributeValue = attributes.getValue(attributeName);
        if ( attributeValue != null ) {
            convertAttributeToProperty(attributes, attributeName, attributeValue);
        }
        return attributeValue;
    }

    /**
     * <p>Adds the attributes, passed in by the parser to the PropertyList.</p>
     * <p>Note that certain attributes are given priority in terms of order of
     * processing due to conversion dependencies, where the order is as follows:</p>
     * <ol>
     * <li>writing-mode</li>
     * <li>column-number</li>
     * <li>number-columns-spanned</li>
     * <li>font</li>
     * <li>font-size</li>
     * <li><emph>all others in order of appearance</emph></li>
     * </ol>
     *
     * @param attributes Collection of attributes passed to us from the parser.
     * @throws ValidationException if there is an attribute that does not
     *          map to a property id (strict validation only)
     */
    public void addAttributesToList(Attributes attributes)
                    throws ValidationException {
        /*
         * Give writing-mode highest conversion priority.
         */
        addAttributeToList(attributes, "writing-mode");

        /*
         * If column-number/number-columns-spanned are specified, then we
         * need them before all others (possible from-table-column() on any
         * other property further in the list...
         */
        addAttributeToList(attributes, "column-number");
        addAttributeToList(attributes, "number-columns-spanned");

        /*
         * If font-size is set on this FO, must set it first, since
         * other attributes specified in terms of "ems" depend on it.
         */
        String checkValue = addAttributeToList(attributes, "font");
        if (checkValue == null || "".equals(checkValue)) {
            /*
             * font shorthand wasn't specified, so still need to process
             * explicit font-size
             */
            addAttributeToList(attributes, "font-size");
        }

        String attributeNS;
        String attributeName;
        String attributeValue;
        FopFactory factory = getFObj().getUserAgent().getFactory();
        for (int i = 0; i < attributes.getLength(); i++) {
            /* convert all attributes with the same namespace as the fo element
             * the "xml:lang" and "xml:base" properties are special cases */
            attributeNS = attributes.getURI(i);
            attributeName = attributes.getQName(i);
            attributeValue = attributes.getValue(i);
            if (attributeNS == null || attributeNS.length() == 0
                    || "xml:lang".equals(attributeName)
                    || "xml:base".equals(attributeName)) {
                convertAttributeToProperty(attributes, attributeName, attributeValue);
            } else if (!factory.isNamespaceIgnored(attributeNS)) {
                // foreign namespace: ask the element mapping whether this
                // attribute should be treated as a property
                ElementMapping mapping = factory.getElementMappingRegistry().getElementMapping(
                        attributeNS);
                QName attr = new QName(attributeNS, attributeName);
                if (mapping != null) {
                    if (mapping.isAttributeProperty(attr)
                            && mapping.getStandardPrefix() != null) {
                        convertAttributeToProperty(attributes,
                                mapping.getStandardPrefix() + ":" + attr.getLocalName(),
                                attributeValue);
                    } else {
                        // known mapping but not a property: keep as foreign attribute
                        getFObj().addForeignAttribute(attr, attributeValue);
                    }
                } else {
                    handleInvalidProperty(attr);
                }
            }
        }
    }

    /**
     * Validates a property name.
     * @param propertyName the property name to check
     * @return true if the base property name and the subproperty name (if any)
     * can be correctly mapped to an id
     */
    protected boolean isValidPropertyName(String propertyName) {

        int propId = FOPropertyMapping.getPropertyId(
                findBasePropertyName(propertyName));
        int subpropId = FOPropertyMapping.getSubPropertyId(
                findSubPropertyName(propertyName));

        return !(propId == -1
                || (subpropId == -1
                        && findSubPropertyName(propertyName) != null));
    }

    /**
     * Converts a single parsed attribute into an explicit Property on this list.
     * Handles compound names of the form "base.sub" (e.g. space-before.minimum).
     * @param attributes Collection of attributes
     * @param attributeName Attribute name to convert
     * @param attributeValue Attribute value to assign to property
     * @throws ValidationException in case the property name is invalid
     * for the FO namespace
     */
    private void convertAttributeToProperty(Attributes attributes,
                                            String attributeName,
                                            String attributeValue)
                    throws ValidationException {

        if (attributeName.startsWith("xmlns:")
                || "xmlns".equals(attributeName)) {
            /* Ignore namespace declarations if the XML parser/XSLT processor
             * reports them as 'regular' attributes */
            return;
        }

        if (attributeValue != null) {
            /* Handle "compound" properties, ex. space-before.minimum */
            String basePropertyName = findBasePropertyName(attributeName);
            String subPropertyName = findSubPropertyName(attributeName);

            int propId = FOPropertyMapping.getPropertyId(basePropertyName);
            int subpropId = FOPropertyMapping.getSubPropertyId(subPropertyName);

            if (propId == -1
                    || (subpropId == -1
                            && subPropertyName != null)) {
                // unknown base or sub-property name; may throw in strict mode
                handleInvalidProperty(new QName(null, attributeName));
            }
            FObj parentFO = fobj.findNearestAncestorFObj();

            PropertyMaker propertyMaker = findMaker(propId);
            if (propertyMaker == null) {
                log.warn("No PropertyMaker registered for " + attributeName
                        + ". Ignoring property.");
                return;
            }

            try {
                Property prop = null;
                if (subPropertyName == null) { // base attribute only found
                    /* Do nothing if the base property has already been created.
                     * This is e.g. the case when a compound attribute was
                     * specified before the base attribute; in these cases
                     * the base attribute was already created in
                     * findBaseProperty()
                     */
                    if (getExplicit(propId) != null) {
                        return;
                    }
                    prop = propertyMaker.make(this, attributeValue, parentFO);
                } else { // e.g. "leader-length.maximum"
                    Property baseProperty
                            = findBaseProperty(attributes, parentFO, propId,
                                    basePropertyName, propertyMaker);
                    prop = propertyMaker.make(baseProperty, subpropId,
                            this, attributeValue, parentFO);
                }
                if (prop != null) {
                    putExplicit(propId, prop);
                }
            } catch (PropertyException e) {
                // report through the validation event producer rather than failing hard
                fobj.getFOValidationEventProducer().invalidPropertyValue(this, fobj.getName(),
                        attributeName, attributeValue, e, fobj.locator);
            }
        }
    }

    /**
     * Looks up (or creates) the base property for a compound attribute,
     * so the sub-property value can be merged into it.
     * @param attributes the full attribute collection (may hold the base value)
     * @param parentFO the nearest ancestor FObj
     * @param propId the base property's id
     * @param basePropertyName the base part of the compound name
     * @param propertyMaker the maker for the base property
     * @return the base Property, or null if none could be found or created
     * @throws PropertyException if the maker fails to build the property
     */
    private Property findBaseProperty(Attributes attributes,
                                      FObj parentFO,
                                      int propId,
                                      String basePropertyName,
                                      PropertyMaker propertyMaker)
            throws PropertyException {

        /* If the baseProperty has already been created, return it
         * e.g. <fo:leader xxxx="120pt" xxxx.maximum="200pt"... />
         */
        Property baseProperty = getExplicit(propId);
        if (baseProperty != null) {
            return baseProperty;
        }

        /* Otherwise If it is specified later in this list of Attributes, create it now
         * e.g. <fo:leader xxxx.maximum="200pt" xxxx="200pt"... />
         */
        String basePropertyValue = attributes.getValue(basePropertyName);

        if (basePropertyValue != null && propertyMaker != null) {
            baseProperty = propertyMaker.make(this, basePropertyValue,
                    parentFO);
            return baseProperty;
        }

        return null; // could not find base property
    }

    /**
     * Handles an invalid property.
     * @param attr the invalid attribute
     * @throws ValidationException if an exception needs to be thrown depending on the
     * validation settings
     */
    protected void handleInvalidProperty(QName attr)
                    throws ValidationException {
        // namespace declarations reported as attributes are silently ignored
        if (!attr.getQName().startsWith("xmlns")) {
            fobj.getFOValidationEventProducer().invalidProperty(this, fobj.getName(),
                    attr, true, fobj.locator);
        }
    }

    /**
     * Finds the first or base part (up to any period) of an attribute name.
     * For example, if input is "space-before.minimum", should return
     * "space-before".
     * @param attributeName String to be atomized
     * @return the base portion of the attribute
     */
    protected static String findBasePropertyName(String attributeName) {
        int separatorCharIndex = attributeName.indexOf('.');
        String basePropertyName = attributeName;
        if (separatorCharIndex > -1) {
            basePropertyName = attributeName.substring(0, separatorCharIndex);
        }
        return basePropertyName;
    }

    /**
     * Finds the second or sub part (portion past any period) of an attribute
     * name. For example, if input is "space-before.minimum", should return
     * "minimum".
* @param attributeName String to be atomized * @return the sub portion of the attribute */ protected static String findSubPropertyName(String attributeName) { int separatorCharIndex = attributeName.indexOf('.'); String subpropertyName = null; if (separatorCharIndex > -1) { subpropertyName = attributeName.substring(separatorCharIndex + 1); } return subpropertyName; } /** * @param propId ID of property * @return new Property object * @throws PropertyException if there's a problem while processing the property */ private Property getShorthand(int propId) throws PropertyException { PropertyMaker propertyMaker = findMaker(propId); if (propertyMaker != null) { return propertyMaker.getShorthand(this); } else { //log.error("no Maker for " + propertyName); return null; } } /** * @param propId ID of property * @return new Property object * @throws PropertyException if there's a problem while processing the property */ private Property makeProperty(int propId) throws PropertyException { PropertyMaker propertyMaker = findMaker(propId); if (propertyMaker != null) { return propertyMaker.make(this); } else { //log.error("property " + propertyName // + " ignored"); } return null; } /** * @param propId ID of property * @return isInherited value from the requested Property.Maker */ private boolean isInherited(int propId) { if (inheritableProperty == null) { inheritableProperty = new boolean[Constants.PROPERTY_COUNT + 1]; PropertyMaker maker = null; for (int prop = 1; prop <= Constants.PROPERTY_COUNT; prop++) { maker = findMaker(prop); inheritableProperty[prop] = (maker != null && maker.isInherited()); } } return inheritableProperty[propId]; } /** * @param propId Id of property * @return the Property.Maker for this property */ private PropertyMaker findMaker(int propId) { if (propId < 1 || propId > Constants.PROPERTY_COUNT) { return null; } else { return FObj.getPropertyMakerFor(propId); } } /** * Constructs a BorderAndPadding object. 
* @return a BorderAndPadding object * @throws PropertyException if there's a problem while processing the properties */ public CommonBorderPaddingBackground getBorderPaddingBackgroundProps() throws PropertyException { return CommonBorderPaddingBackground.getInstance(this); } /** * Constructs a CommonHyphenation object. * @return the CommonHyphenation object * @throws PropertyException if there's a problem while processing the properties */ public CommonHyphenation getHyphenationProps() throws PropertyException { return CommonHyphenation.getInstance(this); } /** * Constructs a CommonMarginBlock object. * @return the CommonMarginBlock object * @throws PropertyException if there's a problem while processing the properties */ public CommonMarginBlock getMarginBlockProps() throws PropertyException { return new CommonMarginBlock(this); } /** * Constructs a CommonMarginInline object. * @return the CommonMarginInline object * @throws PropertyException if there's a problem while processing the properties */ public CommonMarginInline getMarginInlineProps() throws PropertyException { return new CommonMarginInline(this); } /** * Constructs a CommonAccessibility object. * @return the CommonAccessibility object * @throws PropertyException if there's a problem while processing the properties */ public CommonAccessibility getAccessibilityProps() throws PropertyException { return new CommonAccessibility(this); } /** * Constructs a CommonAural object. * @return the CommonAural object * @throws PropertyException if there's a problem while processing the properties */ public CommonAural getAuralProps() throws PropertyException { CommonAural props = new CommonAural(this); return props; } /** * Constructs a RelativePositionProps objects. 
* @return a RelativePositionProps object * @throws PropertyException if there's a problem while processing the properties */ public CommonRelativePosition getRelativePositionProps() throws PropertyException { return new CommonRelativePosition(this); } /** * Constructs a CommonAbsolutePosition object. * @return the CommonAbsolutePosition object * @throws PropertyException if there's a problem while processing the properties */ public CommonAbsolutePosition getAbsolutePositionProps() throws PropertyException { return new CommonAbsolutePosition(this); } /** * Constructs a CommonFont object. * * @return A CommonFont object * @throws PropertyException if there's a problem while processing the properties */ public CommonFont getFontProps() throws PropertyException { return CommonFont.getInstance(this); } /** * Constructs a CommonTextDecoration object. * @return a CommonTextDecoration object * @throws PropertyException if there's a problem while processing the properties */ public CommonTextDecoration getTextDecorationProps() throws PropertyException { return CommonTextDecoration.createFromPropertyList(this); } }
/* * Copyright 2012 Metamarkets Group Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.druid.extendedset.intset; import com.google.common.collect.Iterators; import com.google.common.collect.Lists; import com.google.common.collect.MinMaxPriorityQueue; import com.google.common.collect.UnmodifiableIterator; import com.google.common.primitives.Ints; import io.druid.extendedset.utilities.IntList; import java.nio.ByteBuffer; import java.nio.IntBuffer; import java.util.Arrays; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; public class ImmutableConciseSet { private final static int CHUNK_SIZE = 10000; private final IntBuffer words; private final int lastWordIndex; private final int size; public ImmutableConciseSet() { this.words = null; this.lastWordIndex = -1; this.size = 0; } public ImmutableConciseSet(ByteBuffer byteBuffer) { this.words = byteBuffer.asIntBuffer(); this.lastWordIndex = words.capacity() - 1; this.size = calcSize(); } public ImmutableConciseSet(IntBuffer buffer) { this.words = buffer; this.lastWordIndex = (words == null || buffer.capacity() == 0) ? 
-1 : words.capacity() - 1; this.size = calcSize(); } public static ImmutableConciseSet newImmutableFromMutable(ConciseSet conciseSet) { if (conciseSet == null || conciseSet.isEmpty()) { return new ImmutableConciseSet(); } return new ImmutableConciseSet(IntBuffer.wrap(conciseSet.getWords())); } public static int compareInts(int x, int y) { return (x < y) ? -1 : ((x == y) ? 0 : 1); } public static ImmutableConciseSet union(ImmutableConciseSet... sets) { return union(Arrays.asList(sets)); } public static ImmutableConciseSet union(Iterable<ImmutableConciseSet> sets) { return union(sets.iterator()); } public static ImmutableConciseSet union(Iterator<ImmutableConciseSet> sets) { ImmutableConciseSet partialResults = doUnion(Iterators.limit(sets, CHUNK_SIZE)); while (sets.hasNext()) { final UnmodifiableIterator<ImmutableConciseSet> partialIter = Iterators.singletonIterator(partialResults); partialResults = doUnion(Iterators.<ImmutableConciseSet>concat(partialIter, Iterators.limit(sets, CHUNK_SIZE))); } return partialResults; } public static ImmutableConciseSet intersection(ImmutableConciseSet... 
sets) { return intersection(Arrays.asList(sets)); } public static ImmutableConciseSet intersection(Iterable<ImmutableConciseSet> sets) { return intersection(sets.iterator()); } public static ImmutableConciseSet intersection(Iterator<ImmutableConciseSet> sets) { ImmutableConciseSet partialResults = doIntersection(Iterators.limit(sets, CHUNK_SIZE)); while (sets.hasNext()) { final UnmodifiableIterator<ImmutableConciseSet> partialIter = Iterators.singletonIterator(partialResults); partialResults = doIntersection( Iterators.<ImmutableConciseSet>concat(Iterators.limit(sets, CHUNK_SIZE), partialIter) ); } return partialResults; } public static ImmutableConciseSet complement(ImmutableConciseSet set) { return doComplement(set); } public static ImmutableConciseSet complement(ImmutableConciseSet set, int length) { if (length <= 0) { return new ImmutableConciseSet(); } // special case when the set is empty and we need a concise set of ones if (set == null || set.isEmpty()) { final int leftoverBits = length % 31; final int onesBlocks = length / 31; final int[] words; if (onesBlocks > 0) { if (leftoverBits > 0) { words = new int[]{ ConciseSetUtils.SEQUENCE_BIT | (onesBlocks - 1), ConciseSetUtils.onesUntil(leftoverBits) }; } else { words = new int[]{ ConciseSetUtils.SEQUENCE_BIT | (onesBlocks - 1) }; } } else { if (leftoverBits > 0) { words = new int[]{ConciseSetUtils.onesUntil(leftoverBits)}; } else { words = new int[]{}; } } ConciseSet newSet = new ConciseSet(words, false); return ImmutableConciseSet.newImmutableFromMutable(newSet); } IntList retVal = new IntList(); int endIndex = length - 1; int wordsWalked = 0; int last = 0; WordIterator iter = set.newWordIterator(); while (iter.hasNext()) { int word = iter.next(); wordsWalked = iter.wordsWalked; if (ConciseSetUtils.isLiteral(word)) { retVal.add(ConciseSetUtils.ALL_ZEROS_LITERAL | ~word); } else { retVal.add(ConciseSetUtils.SEQUENCE_BIT ^ word); } } last = set.getLast(); int distFromLastWordBoundary = 
ConciseSetUtils.maxLiteralLengthModulus(last); int distToNextWordBoundary = ConciseSetUtils.MAX_LITERAL_LENGTH - distFromLastWordBoundary - 1; last = (last < 0) ? 0 : last + distToNextWordBoundary; int diff = endIndex - last; // only append a new literal when the end index is beyond the current word if (diff > 0) { // first check if the difference can be represented in 31 bits if (diff <= ConciseSetUtils.MAX_LITERAL_LENGTH) { retVal.add(ConciseSetUtils.ALL_ONES_LITERAL); } else { // create a fill from last set bit to endIndex for number of 31 bit blocks minus one int endIndexWordCount = ConciseSetUtils.maxLiteralLengthDivision(endIndex); retVal.add(ConciseSetUtils.SEQUENCE_BIT | (endIndexWordCount - wordsWalked - 1)); retVal.add(ConciseSetUtils.ALL_ONES_LITERAL); } } // clear bits after last set value int lastWord = retVal.get(retVal.length() - 1); if (ConciseSetUtils.isLiteral(lastWord)) { lastWord = ConciseSetUtils.clearBitsAfterInLastWord( lastWord, ConciseSetUtils.maxLiteralLengthModulus(endIndex) ); } retVal.set(retVal.length() - 1, lastWord); trimZeros(retVal); if (retVal.isEmpty()) { return new ImmutableConciseSet(); } return compact(new ImmutableConciseSet(IntBuffer.wrap(retVal.toArray()))); } public static ImmutableConciseSet compact(ImmutableConciseSet set) { IntList retVal = new IntList(); WordIterator itr = set.newWordIterator(); while (itr.hasNext()) { addAndCompact(retVal, itr.next()); } return new ImmutableConciseSet(IntBuffer.wrap(retVal.toArray())); } private static void addAndCompact(IntList set, int wordToAdd) { int length = set.length(); if (set.isEmpty()) { set.add(wordToAdd); return; } int last = set.get(length - 1); int newWord = 0; if (ConciseSetUtils.isAllOnesLiteral(last)) { if (ConciseSetUtils.isAllOnesLiteral(wordToAdd)) { newWord = 0x40000001; } else if (ConciseSetUtils.isOneSequence(wordToAdd) && ConciseSetUtils.getFlippedBit(wordToAdd) == -1) { newWord = wordToAdd + 1; } } else if (ConciseSetUtils.isOneSequence(last)) { if 
(ConciseSetUtils.isAllOnesLiteral(wordToAdd)) { newWord = last + 1; } else if (ConciseSetUtils.isOneSequence(wordToAdd) && ConciseSetUtils.getFlippedBit(wordToAdd) == -1) { newWord = last + ConciseSetUtils.getSequenceNumWords(wordToAdd); } } else if (ConciseSetUtils.isAllZerosLiteral(last)) { if (ConciseSetUtils.isAllZerosLiteral(wordToAdd)) { newWord = 0x00000001; } else if (ConciseSetUtils.isZeroSequence(wordToAdd) && ConciseSetUtils.getFlippedBit(wordToAdd) == -1) { newWord = wordToAdd + 1; } } else if (ConciseSetUtils.isZeroSequence(last)) { if (ConciseSetUtils.isAllZerosLiteral(wordToAdd)) { newWord = last + 1; } else if (ConciseSetUtils.isZeroSequence(wordToAdd) && ConciseSetUtils.getFlippedBit(wordToAdd) == -1) { newWord = last + ConciseSetUtils.getSequenceNumWords(wordToAdd); } } else if (ConciseSetUtils.isLiteralWithSingleOneBit(last)) { int position = Integer.numberOfTrailingZeros(last) + 1; if (ConciseSetUtils.isAllZerosLiteral(wordToAdd)) { newWord = 0x00000001 | (position << 25); } else if (ConciseSetUtils.isZeroSequence(wordToAdd) && ConciseSetUtils.getFlippedBit(wordToAdd) == -1) { newWord = (wordToAdd + 1) | (position << 25); } } else if (ConciseSetUtils.isLiteralWithSingleZeroBit(last)) { int position = Integer.numberOfTrailingZeros(~last) + 1; if (ConciseSetUtils.isAllOnesLiteral(wordToAdd)) { newWord = 0x40000001 | (position << 25); } else if (ConciseSetUtils.isOneSequence(wordToAdd) && ConciseSetUtils.getFlippedBit(wordToAdd) == -1) { newWord = (wordToAdd + 1) | (position << 25); } } if (newWord != 0) { set.set(length - 1, newWord); } else { set.add(wordToAdd); } } private static ImmutableConciseSet doUnion(Iterator<ImmutableConciseSet> sets) { IntList retVal = new IntList(); // lhs = current word position, rhs = the iterator // Comparison is first by index, then one fills > literals > zero fills // one fills are sorted by length (longer one fills have priority) // similarily, shorter zero fills have priority MinMaxPriorityQueue<WordHolder> theQ 
= MinMaxPriorityQueue.orderedBy( new Comparator<WordHolder>() { @Override public int compare(WordHolder h1, WordHolder h2) { int w1 = h1.getWord(); int w2 = h2.getWord(); int s1 = h1.getIterator().startIndex; int s2 = h2.getIterator().startIndex; if (s1 != s2) { return compareInts(s1, s2); } if (ConciseSetUtils.isOneSequence(w1)) { if (ConciseSetUtils.isOneSequence(w2)) { return -compareInts(ConciseSetUtils.getSequenceNumWords(w1), ConciseSetUtils.getSequenceNumWords(w2)); } return -1; } else if (ConciseSetUtils.isLiteral(w1)) { if (ConciseSetUtils.isOneSequence(w2)) { return 1; } else if (ConciseSetUtils.isLiteral(w2)) { return 0; } return -1; } else { if (!ConciseSetUtils.isZeroSequence(w2)) { return 1; } return compareInts(ConciseSetUtils.getSequenceNumWords(w1), ConciseSetUtils.getSequenceNumWords(w2)); } } } ).create(); // populate priority queue while (sets.hasNext()) { ImmutableConciseSet set = sets.next(); if (set != null && !set.isEmpty()) { WordIterator itr = set.newWordIterator(); theQ.add(new WordHolder(itr.next(), itr)); } } int currIndex = 0; while (!theQ.isEmpty()) { // create a temp list to hold everything that will get pushed back into the priority queue after each run List<WordHolder> wordsToAdd = Lists.newArrayList(); // grab the top element from the priority queue WordHolder curr = theQ.poll(); int word = curr.getWord(); WordIterator itr = curr.getIterator(); // if the next word in the queue starts at a different point than where we ended off we need to create a zero gap // to fill the space if (currIndex < itr.startIndex) { addAndCompact(retVal, itr.startIndex - currIndex - 1); currIndex = itr.startIndex; } if (ConciseSetUtils.isOneSequence(word)) { // extract a literal from the flip bits of the one sequence int flipBitLiteral = ConciseSetUtils.getLiteralFromOneSeqFlipBit(word); // advance everything past the longest ones sequence WordHolder nextVal = theQ.peek(); while (nextVal != null && nextVal.getIterator().startIndex < itr.wordsWalked) { 
WordHolder entry = theQ.poll(); int w = entry.getWord(); WordIterator i = entry.getIterator(); if (i.startIndex == itr.startIndex) { // if a literal was created from a flip bit, OR it with other literals or literals from flip bits in the same // position if (ConciseSetUtils.isOneSequence(w)) { flipBitLiteral |= ConciseSetUtils.getLiteralFromOneSeqFlipBit(w); } else if (ConciseSetUtils.isLiteral(w)) { flipBitLiteral |= w; } else { flipBitLiteral |= ConciseSetUtils.getLiteralFromZeroSeqFlipBit(w); } } i.advanceTo(itr.wordsWalked); if (i.hasNext()) { wordsToAdd.add(new WordHolder(i.next(), i)); } nextVal = theQ.peek(); } // advance longest one literal forward and push result back to priority queue // if a flip bit is still needed, put it in the correct position int newWord = word & 0xC1FFFFFF; if (flipBitLiteral != ConciseSetUtils.ALL_ONES_LITERAL) { flipBitLiteral ^= ConciseSetUtils.ALL_ONES_LITERAL; int position = Integer.numberOfTrailingZeros(flipBitLiteral) + 1; newWord |= (position << 25); } addAndCompact(retVal, newWord); currIndex = itr.wordsWalked; if (itr.hasNext()) { wordsToAdd.add(new WordHolder(itr.next(), itr)); } } else if (ConciseSetUtils.isLiteral(word)) { // advance all other literals WordHolder nextVal = theQ.peek(); while (nextVal != null && nextVal.getIterator().startIndex == itr.startIndex) { WordHolder entry = theQ.poll(); int w = entry.getWord(); WordIterator i = entry.getIterator(); // if we still have zero fills with flipped bits, OR them here if (ConciseSetUtils.isLiteral(w)) { word |= w; } else { int flipBitLiteral = ConciseSetUtils.getLiteralFromZeroSeqFlipBit(w); if (flipBitLiteral != ConciseSetUtils.ALL_ZEROS_LITERAL) { word |= flipBitLiteral; i.advanceTo(itr.wordsWalked); } } if (i.hasNext()) { wordsToAdd.add(new WordHolder(i.next(), i)); } nextVal = theQ.peek(); } // advance the set with the current literal forward and push result back to priority queue addAndCompact(retVal, word); currIndex++; if (itr.hasNext()) { wordsToAdd.add(new 
WordHolder(itr.next(), itr)); } } else { // zero fills int flipBitLiteral; WordHolder nextVal = theQ.peek(); while (nextVal != null && nextVal.getIterator().startIndex == itr.startIndex) { // check if literal can be created flip bits of other zero sequences WordHolder entry = theQ.poll(); int w = entry.getWord(); WordIterator i = entry.getIterator(); flipBitLiteral = ConciseSetUtils.getLiteralFromZeroSeqFlipBit(w); if (flipBitLiteral != ConciseSetUtils.ALL_ZEROS_LITERAL) { wordsToAdd.add(new WordHolder(flipBitLiteral, i)); } else if (i.hasNext()) { wordsToAdd.add(new WordHolder(i.next(), i)); } nextVal = theQ.peek(); } // check if a literal needs to be created from the flipped bits of this sequence flipBitLiteral = ConciseSetUtils.getLiteralFromZeroSeqFlipBit(word); if (flipBitLiteral != ConciseSetUtils.ALL_ZEROS_LITERAL) { wordsToAdd.add(new WordHolder(flipBitLiteral, itr)); } else if (itr.hasNext()) { wordsToAdd.add(new WordHolder(itr.next(), itr)); } } theQ.addAll(wordsToAdd); } if (retVal.isEmpty()) { return new ImmutableConciseSet(); } return new ImmutableConciseSet(IntBuffer.wrap(retVal.toArray())); } public static ImmutableConciseSet doIntersection(Iterator<ImmutableConciseSet> sets) { IntList retVal = new IntList(); // lhs = current word position, rhs = the iterator // Comparison is first by index, then zero fills > literals > one fills // zero fills are sorted by length (longer zero fills have priority) // similarily, shorter one fills have priority MinMaxPriorityQueue<WordHolder> theQ = MinMaxPriorityQueue.orderedBy( new Comparator<WordHolder>() { @Override public int compare(WordHolder h1, WordHolder h2) { int w1 = h1.getWord(); int w2 = h2.getWord(); int s1 = h1.getIterator().startIndex; int s2 = h2.getIterator().startIndex; if (s1 != s2) { return compareInts(s1, s2); } if (ConciseSetUtils.isZeroSequence(w1)) { if (ConciseSetUtils.isZeroSequence(w2)) { return -compareInts(ConciseSetUtils.getSequenceNumWords(w1), ConciseSetUtils.getSequenceNumWords(w2)); 
} return -1; } else if (ConciseSetUtils.isLiteral(w1)) { if (ConciseSetUtils.isZeroSequence(w2)) { return 1; } else if (ConciseSetUtils.isLiteral(w2)) { return 0; } return -1; } else { if (!ConciseSetUtils.isOneSequence(w2)) { return 1; } return compareInts(ConciseSetUtils.getSequenceNumWords(w1), ConciseSetUtils.getSequenceNumWords(w2)); } } } ).create(); // populate priority queue while (sets.hasNext()) { ImmutableConciseSet set = sets.next(); if (set == null || set.isEmpty()) { return new ImmutableConciseSet(); } WordIterator itr = set.newWordIterator(); theQ.add(new WordHolder(itr.next(), itr)); } int currIndex = 0; int wordsWalkedAtSequenceEnd = Integer.MAX_VALUE; while (!theQ.isEmpty()) { // create a temp list to hold everything that will get pushed back into the priority queue after each run List<WordHolder> wordsToAdd = Lists.newArrayList(); // grab the top element from the priority queue WordHolder curr = theQ.poll(); int word = curr.getWord(); WordIterator itr = curr.getIterator(); // if a sequence has ended, we can break out because of Boolean logic if (itr.startIndex >= wordsWalkedAtSequenceEnd) { break; } // if the next word in the queue starts at a different point than where we ended off we need to create a one gap // to fill the space if (currIndex < itr.startIndex) { // number of 31 bit blocks that compromise the fill minus one addAndCompact(retVal, (ConciseSetUtils.SEQUENCE_BIT | (itr.startIndex - currIndex - 1))); currIndex = itr.startIndex; } if (ConciseSetUtils.isZeroSequence(word)) { // extract a literal from the flip bits of the zero sequence int flipBitLiteral = ConciseSetUtils.getLiteralFromZeroSeqFlipBit(word); // advance everything past the longest zero sequence WordHolder nextVal = theQ.peek(); while (nextVal != null && nextVal.getIterator().startIndex < itr.wordsWalked) { WordHolder entry = theQ.poll(); int w = entry.getWord(); WordIterator i = entry.getIterator(); if (i.startIndex == itr.startIndex) { // if a literal was created from a 
flip bit, AND it with other literals or literals from flip bits in the same // position if (ConciseSetUtils.isZeroSequence(w)) { flipBitLiteral &= ConciseSetUtils.getLiteralFromZeroSeqFlipBit(w); } else if (ConciseSetUtils.isLiteral(w)) { flipBitLiteral &= w; } else { flipBitLiteral &= ConciseSetUtils.getLiteralFromOneSeqFlipBit(w); } } i.advanceTo(itr.wordsWalked); if (i.hasNext()) { wordsToAdd.add(new WordHolder(i.next(), i)); } else { wordsWalkedAtSequenceEnd = Math.min(i.wordsWalked, wordsWalkedAtSequenceEnd); } nextVal = theQ.peek(); } // advance longest zero literal forward and push result back to priority queue // if a flip bit is still needed, put it in the correct position int newWord = word & 0xC1FFFFFF; if (flipBitLiteral != ConciseSetUtils.ALL_ZEROS_LITERAL) { int position = Integer.numberOfTrailingZeros(flipBitLiteral) + 1; newWord = (word & 0xC1FFFFFF) | (position << 25); } addAndCompact(retVal, newWord); currIndex = itr.wordsWalked; if (itr.hasNext()) { wordsToAdd.add(new WordHolder(itr.next(), itr)); } else { wordsWalkedAtSequenceEnd = Math.min(itr.wordsWalked, wordsWalkedAtSequenceEnd); } } else if (ConciseSetUtils.isLiteral(word)) { // advance all other literals WordHolder nextVal = theQ.peek(); while (nextVal != null && nextVal.getIterator().startIndex == itr.startIndex) { WordHolder entry = theQ.poll(); int w = entry.getWord(); WordIterator i = entry.getIterator(); // if we still have one fills with flipped bits, AND them here if (ConciseSetUtils.isLiteral(w)) { word &= w; } else { int flipBitLiteral = ConciseSetUtils.getLiteralFromOneSeqFlipBit(w); if (flipBitLiteral != ConciseSetUtils.ALL_ONES_LITERAL) { word &= flipBitLiteral; i.advanceTo(itr.wordsWalked); } } if (i.hasNext()) { wordsToAdd.add(new WordHolder(i.next(), i)); } else { wordsWalkedAtSequenceEnd = Math.min(i.wordsWalked, wordsWalkedAtSequenceEnd); } nextVal = theQ.peek(); } // advance the set with the current literal forward and push result back to priority queue 
addAndCompact(retVal, word); currIndex++; if (itr.hasNext()) { wordsToAdd.add(new WordHolder(itr.next(), itr)); } else { wordsWalkedAtSequenceEnd = Math.min(itr.wordsWalked, wordsWalkedAtSequenceEnd); } } else { // one fills int flipBitLiteral; WordHolder nextVal = theQ.peek(); while (nextVal != null && nextVal.getIterator().startIndex == itr.startIndex) { // check if literal can be created flip bits of other one sequences WordHolder entry = theQ.poll(); int w = entry.getWord(); WordIterator i = entry.getIterator(); flipBitLiteral = ConciseSetUtils.getLiteralFromOneSeqFlipBit(w); if (flipBitLiteral != ConciseSetUtils.ALL_ONES_LITERAL) { wordsToAdd.add(new WordHolder(flipBitLiteral, i)); } else if (i.hasNext()) { wordsToAdd.add(new WordHolder(i.next(), i)); } else { wordsWalkedAtSequenceEnd = Math.min(i.wordsWalked, wordsWalkedAtSequenceEnd); } nextVal = theQ.peek(); } // check if a literal needs to be created from the flipped bits of this sequence flipBitLiteral = ConciseSetUtils.getLiteralFromOneSeqFlipBit(word); if (flipBitLiteral != ConciseSetUtils.ALL_ONES_LITERAL) { wordsToAdd.add(new WordHolder(flipBitLiteral, itr)); } else if (itr.hasNext()) { wordsToAdd.add(new WordHolder(itr.next(), itr)); } else { wordsWalkedAtSequenceEnd = Math.min(itr.wordsWalked, wordsWalkedAtSequenceEnd); } } theQ.addAll(wordsToAdd); } // fill in any missing one sequences if (currIndex < wordsWalkedAtSequenceEnd) { addAndCompact(retVal, (ConciseSetUtils.SEQUENCE_BIT | (wordsWalkedAtSequenceEnd - currIndex - 1))); } if (retVal.isEmpty()) { return new ImmutableConciseSet(); } return new ImmutableConciseSet(IntBuffer.wrap(retVal.toArray())); } public static ImmutableConciseSet doComplement(ImmutableConciseSet set) { if (set == null || set.isEmpty()) { return new ImmutableConciseSet(); } IntList retVal = new IntList(); WordIterator iter = set.newWordIterator(); while (iter.hasNext()) { int word = iter.next(); if (ConciseSetUtils.isLiteral(word)) { 
retVal.add(ConciseSetUtils.ALL_ZEROS_LITERAL | ~word); } else { retVal.add(ConciseSetUtils.SEQUENCE_BIT ^ word); } } // do not complement after the last element int lastWord = retVal.get(retVal.length() - 1); if (ConciseSetUtils.isLiteral(lastWord)) { lastWord = ConciseSetUtils.clearBitsAfterInLastWord( lastWord, ConciseSetUtils.maxLiteralLengthModulus(set.getLast()) ); } retVal.set(retVal.length() - 1, lastWord); trimZeros(retVal); if (retVal.isEmpty()) { return new ImmutableConciseSet(); } return new ImmutableConciseSet(IntBuffer.wrap(retVal.toArray())); } // Based on the ConciseSet implementation by Alessandro Colantonio private static void trimZeros(IntList set) { // loop over ALL_ZEROS_LITERAL words int w; int last = set.length() - 1; do { w = set.get(last); if (w == ConciseSetUtils.ALL_ZEROS_LITERAL) { set.set(last, 0); last--; } else if (ConciseSetUtils.isZeroSequence(w)) { if (ConciseSetUtils.isSequenceWithNoBits(w)) { set.set(last, 0); last--; } else { // convert the sequence in a 1-bit literal word set.set(last, ConciseSetUtils.getLiteral(w, false)); return; } } else { // one sequence or literal return; } if (set.isEmpty() || last == -1) { return; } } while (true); } public byte[] toBytes() { if (words == null) { return new byte[]{}; } ByteBuffer buf = ByteBuffer.allocate(words.capacity() * Ints.BYTES); buf.asIntBuffer().put(words.asReadOnlyBuffer()); return buf.array(); } public int getLastWordIndex() { return lastWordIndex; } // Based on the ConciseSet implementation by Alessandro Colantonio private int calcSize() { int retVal = 0; for (int i = 0; i <= lastWordIndex; i++) { int w = words.get(i); if (ConciseSetUtils.isLiteral(w)) { retVal += ConciseSetUtils.getLiteralBitCount(w); } else { if (ConciseSetUtils.isZeroSequence(w)) { if (!ConciseSetUtils.isSequenceWithNoBits(w)) { retVal++; } } else { retVal += ConciseSetUtils.maxLiteralLengthMultiplication(ConciseSetUtils.getSequenceCount(w) + 1); if (!ConciseSetUtils.isSequenceWithNoBits(w)) { retVal--; } } 
} } return retVal; } public int size() { return size; } // Based on the ConciseSet implementation by Alessandro Colantonio public int getLast() { if (isEmpty()) { return -1; } int last = 0; for (int i = 0; i <= lastWordIndex; i++) { int w = words.get(i); if (ConciseSetUtils.isLiteral(w)) { last += ConciseSetUtils.MAX_LITERAL_LENGTH; } else { last += ConciseSetUtils.maxLiteralLengthMultiplication(ConciseSetUtils.getSequenceCount(w) + 1); } } int w = words.get(lastWordIndex); if (ConciseSetUtils.isLiteral(w)) { last -= Integer.numberOfLeadingZeros(ConciseSetUtils.getLiteralBits(w)); } else { last--; } return last; } public boolean contains(final int integer) { if (isEmpty()) { return false; } final IntSet.IntIterator intIterator = iterator(); intIterator.skipAllBefore(integer); return intIterator.hasNext() && intIterator.next() == integer; } // Based on the ConciseSet implementation by Alessandro Colantonio public int get(int i) { if (i < 0) { throw new IndexOutOfBoundsException(); } // initialize data int firstSetBitInWord = 0; int position = i; int setBitsInCurrentWord = 0; for (int j = 0; j <= lastWordIndex; j++) { int w = words.get(j); if (ConciseSetUtils.isLiteral(w)) { // number of bits in the current word setBitsInCurrentWord = ConciseSetUtils.getLiteralBitCount(w); // check if the desired bit is in the current word if (position < setBitsInCurrentWord) { int currSetBitInWord = -1; for (; position >= 0; position--) { currSetBitInWord = Integer.numberOfTrailingZeros(w & (0xFFFFFFFF << (currSetBitInWord + 1))); } return firstSetBitInWord + currSetBitInWord; } // skip the 31-bit block firstSetBitInWord += ConciseSetUtils.MAX_LITERAL_LENGTH; } else { // number of involved bits (31 * blocks) int sequenceLength = ConciseSetUtils.maxLiteralLengthMultiplication(ConciseSetUtils.getSequenceCount(w) + 1); // check the sequence type if (ConciseSetUtils.isOneSequence(w)) { if (ConciseSetUtils.isSequenceWithNoBits(w)) { setBitsInCurrentWord = sequenceLength; if (position < 
setBitsInCurrentWord) { return firstSetBitInWord + position; } } else { setBitsInCurrentWord = sequenceLength - 1; if (position < setBitsInCurrentWord) // check whether the desired set bit is after the // flipped bit (or after the first block) { return firstSetBitInWord + position + (position < ConciseSetUtils.getFlippedBit(w) ? 0 : 1); } } } else { if (ConciseSetUtils.isSequenceWithNoBits(w)) { setBitsInCurrentWord = 0; } else { setBitsInCurrentWord = 1; if (position == 0) { return firstSetBitInWord + ConciseSetUtils.getFlippedBit(w); } } } // skip the 31-bit blocks firstSetBitInWord += sequenceLength; } // update the number of found set bits position -= setBitsInCurrentWord; } throw new IndexOutOfBoundsException(Integer.toString(i)); } public int compareTo(ImmutableConciseSet other) { return words.asReadOnlyBuffer().compareTo(other.words.asReadOnlyBuffer()); } private boolean isEmpty() { return words == null || words.limit() == 0; } @Override // Based on the AbstractIntSet implementation by Alessandro Colantonio public String toString() { IntSet.IntIterator itr = iterator(); if (!itr.hasNext()) { return "[]"; } StringBuilder sb = new StringBuilder(); sb.append('['); for (; ; ) { sb.append(itr.next()); if (!itr.hasNext()) { return sb.append(']').toString(); } sb.append(", "); } } // Based on the ConciseSet implementation by Alessandro Colantonio public IntSet.IntIterator iterator() { if (isEmpty()) { return new IntSet.IntIterator() { @Override public void skipAllBefore(int element) {/*empty*/} @Override public boolean hasNext() {return false;} @Override public int next() {throw new NoSuchElementException();} @Override public void remove() {throw new UnsupportedOperationException();} @Override public IntSet.IntIterator clone() {throw new UnsupportedOperationException();} }; } return new BitIterator(); } public WordIterator newWordIterator() { return new WordIterator(); } private static class WordHolder { private final int word; private final WordIterator iterator; 
public WordHolder( int word, WordIterator iterator ) { this.word = word; this.iterator = iterator; } public int getWord() { return word; } public WordIterator getIterator() { return iterator; } } // Based on the ConciseSet implementation by Alessandro Colantonio private class BitIterator implements IntSet.IntIterator { final ConciseSetUtils.LiteralAndZeroFillExpander litExp; final ConciseSetUtils.OneFillExpander oneExp; ConciseSetUtils.WordExpander exp; int nextIndex = 0; int nextOffset = 0; private BitIterator() { litExp = ConciseSetUtils.newLiteralAndZeroFillExpander(); oneExp = ConciseSetUtils.newOneFillExpander(); nextWord(); } private BitIterator( ConciseSetUtils.LiteralAndZeroFillExpander litExp, ConciseSetUtils.OneFillExpander oneExp, ConciseSetUtils.WordExpander exp, int nextIndex, int nextOffset ) { this.litExp = litExp; this.oneExp = oneExp; this.exp = exp; this.nextIndex = nextIndex; this.nextOffset = nextOffset; } @Override public boolean hasNext() { while (!exp.hasNext()) { if (nextIndex > lastWordIndex) { return false; } nextWord(); } return true; } @Override public int next() { if (!hasNext()) { throw new NoSuchElementException(); } return exp.next(); } @Override public void remove() { throw new UnsupportedOperationException(); } @Override public void skipAllBefore(int element) { while (true) { exp.skipAllBefore(element); if (exp.hasNext() || nextIndex > lastWordIndex) { return; } nextWord(); } } @Override public IntSet.IntIterator clone() { return new BitIterator( (ConciseSetUtils.LiteralAndZeroFillExpander) litExp.clone(), (ConciseSetUtils.OneFillExpander) oneExp.clone(), exp.clone(), nextIndex, nextOffset ); } private void nextWord() { final int word = words.get(nextIndex++); exp = ConciseSetUtils.isOneSequence(word) ? 
oneExp : litExp; exp.reset(nextOffset, word, true); // prepare next offset if (ConciseSetUtils.isLiteral(word)) { nextOffset += ConciseSetUtils.MAX_LITERAL_LENGTH; } else { nextOffset += ConciseSetUtils.maxLiteralLengthMultiplication(ConciseSetUtils.getSequenceCount(word) + 1); } } } public class WordIterator implements Iterator { private int startIndex; private int wordsWalked; private int currWord; private int nextWord; private int currRow; private volatile boolean hasNextWord = false; WordIterator() { startIndex = -1; wordsWalked = 0; currRow = -1; } public void advanceTo(int endCount) { while (hasNext() && wordsWalked < endCount) { next(); } if (wordsWalked <= endCount) { return; } nextWord = (currWord & 0xC1000000) | (wordsWalked - endCount - 1); startIndex = endCount; hasNextWord = true; } @Override public boolean hasNext() { if (isEmpty()) { return false; } if (hasNextWord) { return true; } return currRow < (words.capacity() - 1); } @Override public Integer next() { if (hasNextWord) { currWord = nextWord; hasNextWord = false; return new Integer(currWord); } currWord = words.get(++currRow); if (ConciseSetUtils.isLiteral(currWord)) { startIndex = wordsWalked++; } else { startIndex = wordsWalked; wordsWalked += ConciseSetUtils.getSequenceNumWords(currWord); } return new Integer(currWord); } @Override public void remove() { throw new UnsupportedOperationException(); } } }
/* Copyright 2011-2016 Google Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.google.security.zynamics.binnavi.debug.debugger; import com.google.common.base.Preconditions; import com.google.security.zynamics.binnavi.CUtilityFunctions; import com.google.security.zynamics.binnavi.Database.Exceptions.CouldntSaveDataException; import com.google.security.zynamics.binnavi.Database.Interfaces.SQLProvider; import com.google.security.zynamics.binnavi.debug.debugger.interfaces.IDebuggerTemplateListener; import com.google.security.zynamics.binnavi.disassembly.IDatabaseObject; import com.google.security.zynamics.zylib.general.ListenerProvider; import com.google.security.zynamics.zylib.net.NetHelpers; /** * A debugger template describes the location of a debug client. */ public final class DebuggerTemplate implements IDatabaseObject { /** * Used to synchronize the debugger template object with the database. */ private final SQLProvider sqlProvider; /** * Listeners that are notified about changes in the debugger template. */ private final ListenerProvider<IDebuggerTemplateListener> listeners = new ListenerProvider<>(); /** * The ID of the debugger template in the database. */ private final int debuggerTemplateId; /** * The name of the debugger template. */ private String debuggerTemplateName; /** * The host address of the debug client. */ private String debugClientHost; /** * The port of the debug client. 
*/ private int debugClientPort; /** * Creates a new debugger template object. * * @param debuggerId The ID of the debugger template in the database. * @param name The name of the debugger template. * @param host The host address of the debug client. * @param port The port of the debug client. * @param sqlProvider Used to synchronize the debugger template object with the database. */ public DebuggerTemplate(final int debuggerId, final String name, final String host, final int port, final SQLProvider sqlProvider) { Preconditions.checkArgument(debuggerId > 0, "IE00796: ID argument must be positive"); debuggerTemplateName = Preconditions.checkNotNull(name, "IE00797: Name argument can not be null"); debugClientHost = Preconditions.checkNotNull(host, "IE00798: Host argument can not be null"); Preconditions.checkArgument(NetHelpers.isValidPort(port), "IE00799: Invalid port argument"); this.sqlProvider = Preconditions.checkNotNull(sqlProvider, "IE00800: SQL provider argument can not be null"); debuggerTemplateId = debuggerId; debugClientPort = port; } /** * Adds a listener that is notified about changes in the debugger template. * * @param listener The listener object to add. */ public void addListener(final IDebuggerTemplateListener listener) { listeners.addListener(listener); } /** * Returns the host location of the debug client. * * @return The host location of the debug client. */ public String getHost() { return debugClientHost; } /** * Returns the database ID of the debugger template. * * @return The database ID of the debugger template. */ public int getId() { return debuggerTemplateId; } /** * Returns the name of the debugger template. * * @return The name of the debugger template. */ public String getName() { return debuggerTemplateName; } /** * Returns the port of the debug client. * * @return The port of the debug client. 
*/ public int getPort() { return debugClientPort; } @Override public boolean inSameDatabase(final IDatabaseObject object) { Preconditions.checkNotNull(object, "IE00801: Object argument can not be null"); return object.inSameDatabase(sqlProvider); } @Override public boolean inSameDatabase(final SQLProvider provider) { return sqlProvider == provider; } /** * Removes a listener from the template. * * @param listener The listener to remove. */ public void removeListener(final IDebuggerTemplateListener listener) { listeners.removeListener(listener); } /** * Sets the host of the debug client. * * @param host The host of the debug client. * * @throws CouldntSaveDataException Thrown if the debug host could not be updated. */ public void setHost(final String host) throws CouldntSaveDataException { Preconditions.checkNotNull(host, "IE00802: Host argument can not be null"); if (debugClientHost.equals(host)) { return; } sqlProvider.setHost(this, host); debugClientHost = host; for (final IDebuggerTemplateListener listener : listeners) { try { listener.changedHost(this); } catch (final Exception e) { CUtilityFunctions.logException(e); } } } /** * Changes the name of the debugger template. * * @param name The new name of the debugger template. * * @throws CouldntSaveDataException Thrown if the name of the debugger template could not be * updated. */ public void setName(final String name) throws CouldntSaveDataException { Preconditions.checkNotNull(name, "IE00803: Name argument can not be null"); if (debuggerTemplateName.equals(name)) { return; } sqlProvider.setName(this, name); debuggerTemplateName = name; for (final IDebuggerTemplateListener listener : listeners) { try { listener.changedName(this); } catch (final Exception e) { CUtilityFunctions.logException(e); } } } /** * Updates the port of the debug client. * * @param port The new debug client port. * * @throws CouldntSaveDataException Thrown if the port could not be updated. 
*/ public void setPort(final int port) throws CouldntSaveDataException { Preconditions.checkArgument(NetHelpers.isValidPort(port), "IE00804: Invalid port"); if (debugClientPort == port) { return; } sqlProvider.setPort(this, port); debugClientPort = port; for (final IDebuggerTemplateListener listener : listeners) { try { listener.changedPort(this); } catch (final Exception e) { CUtilityFunctions.logException(e); } } } @Override public String toString() { return debuggerTemplateName + " - " + debugClientHost + ":" + debugClientPort; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.web;

import com.google.common.collect.ImmutableMap;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.XAttr;
import org.apache.hadoop.fs.XAttrCodec;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSUtilClient;
import org.apache.hadoop.hdfs.XAttrHelper;
import org.apache.hadoop.hdfs.protocol.*;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.StringUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;

import java.io.IOException;
import java.util.*;

/**
 * JSON Utilities: converters between HDFS protocol objects and the JSON maps
 * / strings exchanged over the WebHDFS REST interface.
 */
public class JsonUtil {
  private static final Object[] EMPTY_OBJECT_ARRAY = {};

  // Reuse ObjectMapper instance for improving performance.
  // ObjectMapper is thread safe as long as we always configure instance
  // before use. We don't have a re-entrant call pattern in WebHDFS,
  // so we just need to worry about thread-safety.
  private static final ObjectMapper MAPPER = new ObjectMapper();

  /** Convert a token object to a Json string. */
  public static String toJsonString(final Token<? extends TokenIdentifier> token
      ) throws IOException {
    return toJsonString(Token.class, toJsonMap(token));
  }

  // wraps the token's URL-safe encoding in a one-key map; null token -> null map
  private static Map<String, Object> toJsonMap(
      final Token<? extends TokenIdentifier> token) throws IOException {
    if (token == null) {
      return null;
    }

    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("urlString", token.encodeToUrlString());
    return m;
  }

  /** Convert an exception object to a Json string (RemoteException wire form). */
  public static String toJsonString(final Exception e) {
    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("exception", e.getClass().getSimpleName());
    m.put("message", e.getMessage());
    m.put("javaClassName", e.getClass().getName());
    return toJsonString(RemoteException.class, m);
  }

  // uses the class's simple name as the top-level JSON key
  private static String toJsonString(final Class<?> clazz, final Object value) {
    return toJsonString(clazz.getSimpleName(), value);
  }

  /** Convert a key-value pair to a Json string. */
  public static String toJsonString(final String key, final Object value) {
    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put(key, value);
    try {
      return MAPPER.writeValueAsString(m);
    } catch (IOException ignored) {
      // serialization of a simple map is not expected to fail; null signals failure
    }
    return null;
  }

  /** Convert a FsPermission object to a string (octal, e.g. "755"). */
  private static String toString(final FsPermission permission) {
    return String.format("%o", permission.toShort());
  }

  /**
   * Convert a HdfsFileStatus object to a Json string.
   * When {@code includeType} is true the map is wrapped under a "FileStatus" key.
   */
  public static String toJsonString(final HdfsFileStatus status,
      boolean includeType) {
    if (status == null) {
      return null;
    }
    final Map<String, Object> m = toJsonMap(status);
    try {
      return includeType ?
          toJsonString(FileStatus.class, m) : MAPPER.writeValueAsString(m);
    } catch (IOException ignored) {
    }
    return null;
  }

  // flattens a HdfsFileStatus into the WebHDFS FileStatus JSON fields;
  // optional flags (aclBit, encBit, ecBit, snapshotEnabled) are emitted only when set
  private static Map<String, Object> toJsonMap(HdfsFileStatus status) {
    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("pathSuffix", status.getLocalName());
    m.put("type", WebHdfsConstants.PathType.valueOf(status));
    if (status.isSymlink()) {
      m.put("symlink", DFSUtilClient.bytes2String(status.getSymlinkInBytes()));
    }
    m.put("length", status.getLen());
    m.put("owner", status.getOwner());
    m.put("group", status.getGroup());
    FsPermission perm = status.getPermission();
    m.put("permission", toString(perm));
    if (status.hasAcl()) {
      m.put("aclBit", true);
    }
    if (status.isEncrypted()) {
      m.put("encBit", true);
    }
    if (status.isErasureCoded()) {
      m.put("ecBit", true);
      if (status.getErasureCodingPolicy() != null) {
        // to maintain backward comparability
        m.put("ecPolicy", status.getErasureCodingPolicy().getName());
        // to re-construct HdfsFileStatus object via WebHdfs
        m.put("ecPolicyObj", getEcPolicyAsMap(status.getErasureCodingPolicy()));
      }
    }
    if (status.isSnapshotEnabled()) {
      m.put("snapshotEnabled", status.isSnapshotEnabled());
    }

    m.put("accessTime", status.getAccessTime());
    m.put("modificationTime", status.getModificationTime());
    m.put("blockSize", status.getBlockSize());
    m.put("replication", status.getReplication());
    m.put("fileId", status.getFileId());
    m.put("childrenNum", status.getChildrenNum());
    m.put("storagePolicy", status.getStoragePolicy());
    return m;
  }

  /** Convert an ErasureCodingPolicy to a map. */
  private static Map<String, Object> getEcPolicyAsMap(
      final ErasureCodingPolicy ecPolicy) {
    ImmutableMap.Builder<String, Object> builder = ImmutableMap.builder();
    builder.put("name", ecPolicy.getName())
        .put("cellSize", ecPolicy.getCellSize())
        .put("numDataUnits", ecPolicy.getNumDataUnits())
        .put("numParityUnits", ecPolicy.getNumParityUnits())
        .put("codecName", ecPolicy.getCodecName())
        .put("id", ecPolicy.getId())
        .put("extraOptions", ecPolicy.getSchema().getExtraOptions());
    return builder.build();
  }

  /** Convert an ExtendedBlock to a Json map. */
  private static Map<String, Object> toJsonMap(final ExtendedBlock extendedblock) {
    if (extendedblock == null) {
      return null;
    }

    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("blockPoolId", extendedblock.getBlockPoolId());
    m.put("blockId", extendedblock.getBlockId());
    m.put("numBytes", extendedblock.getNumBytes());
    m.put("generationStamp", extendedblock.getGenerationStamp());
    return m;
  }

  /** Convert a DatanodeInfo to a Json map. */
  static Map<String, Object> toJsonMap(final DatanodeInfo datanodeinfo) {
    if (datanodeinfo == null) {
      return null;
    }

    // TODO: Fix storageID
    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("ipAddr", datanodeinfo.getIpAddr());
    // 'name' is equivalent to ipAddr:xferPort. Older clients (1.x, 0.23.x)
    // expects this instead of the two fields.
    m.put("name", datanodeinfo.getXferAddr());
    m.put("hostName", datanodeinfo.getHostName());
    m.put("storageID", datanodeinfo.getDatanodeUuid());
    m.put("xferPort", datanodeinfo.getXferPort());
    m.put("infoPort", datanodeinfo.getInfoPort());
    m.put("infoSecurePort", datanodeinfo.getInfoSecurePort());
    m.put("ipcPort", datanodeinfo.getIpcPort());

    m.put("capacity", datanodeinfo.getCapacity());
    m.put("dfsUsed", datanodeinfo.getDfsUsed());
    m.put("remaining", datanodeinfo.getRemaining());
    m.put("blockPoolUsed", datanodeinfo.getBlockPoolUsed());
    m.put("cacheCapacity", datanodeinfo.getCacheCapacity());
    m.put("cacheUsed", datanodeinfo.getCacheUsed());
    m.put("lastUpdate", datanodeinfo.getLastUpdate());
    m.put("lastUpdateMonotonic", datanodeinfo.getLastUpdateMonotonic());
    m.put("xceiverCount", datanodeinfo.getXceiverCount());
    m.put("networkLocation", datanodeinfo.getNetworkLocation());
    m.put("adminState", datanodeinfo.getAdminState().name());
    if (datanodeinfo.getUpgradeDomain() != null) {
      m.put("upgradeDomain", datanodeinfo.getUpgradeDomain());
    }
    m.put("lastBlockReportTime", datanodeinfo.getLastBlockReportTime());
    m.put("lastBlockReportMonotonic", datanodeinfo.getLastBlockReportMonotonic());
    return m;
  }

  /** Convert a DatanodeInfo[] to a Json array. */
  private static Object[] toJsonArray(final DatanodeInfo[] array) {
    if (array == null) {
      return null;
    } else if (array.length == 0) {
      return EMPTY_OBJECT_ARRAY;
    } else {
      final Object[] a = new Object[array.length];
      for(int i = 0; i < array.length; i++) {
        a[i] = toJsonMap(array[i]);
      }
      return a;
    }
  }

  /** Convert a StorageType[] to a Json array. */
  private static Object[] toJsonArray(final StorageType[] array) {
    if (array == null) {
      return null;
    } else if (array.length == 0) {
      return EMPTY_OBJECT_ARRAY;
    } else {
      final Object[] a = new Object[array.length];
      for(int i = 0; i < array.length; i++) {
        a[i] = array[i];
      }
      return a;
    }
  }

  /** Convert a LocatedBlock to a Json map. */
  private static Map<String, Object> toJsonMap(final LocatedBlock locatedblock
      ) throws IOException {
    if (locatedblock == null) {
      return null;
    }

    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("blockToken", toJsonMap(locatedblock.getBlockToken()));
    m.put("isCorrupt", locatedblock.isCorrupt());
    m.put("startOffset", locatedblock.getStartOffset());
    m.put("block", toJsonMap(locatedblock.getBlock()));
    m.put("storageTypes", toJsonArray(locatedblock.getStorageTypes()));
    m.put("locations", toJsonArray(locatedblock.getLocations()));
    m.put("cachedLocations", toJsonArray(locatedblock.getCachedLocations()));
    return m;
  }

  // serializes a DirectoryListing: the partial listing plus the count of
  // remaining entries the client must fetch with a follow-up call
  private static Map<String, Object> toJson(final DirectoryListing listing)
      throws IOException {
    final Map<String, Object> m = new TreeMap<>();
    // Serialize FileStatus[] to a FileStatuses map
    m.put("partialListing", toJsonMap(listing.getPartialListing()));
    // Simple int
    m.put("remainingEntries", listing.getRemainingEntries());
    return m;
  }

  /** Convert a DirectoryListing to a Json string. */
  public static String toJsonString(final DirectoryListing listing) throws
      IOException {
    if (listing == null) {
      return null;
    }
    return toJsonString(DirectoryListing.class, toJson(listing));
  }

  // wraps an array of file statuses in the nested {"FileStatuses":{"FileStatus":[...]}}
  // structure mandated by the WebHDFS wire format
  private static Map<String, Object> toJsonMap(HdfsFileStatus[] statuses) throws
      IOException {
    if (statuses == null) {
      return null;
    }

    final Map<String, Object> fileStatuses = new TreeMap<>();
    final Map<String, Object> fileStatus = new TreeMap<>();
    fileStatuses.put("FileStatuses", fileStatus);
    final Object[] array = new Object[statuses.length];
    fileStatus.put("FileStatus", array);
    for (int i = 0; i < statuses.length; i++) {
      array[i] = toJsonMap(statuses[i]);
    }
    return fileStatuses;
  }

  /** Convert a LocatedBlock[] to a Json array. */
  private static Object[] toJsonArray(final List<LocatedBlock> array
      ) throws IOException {
    if (array == null) {
      return null;
    } else if (array.size() == 0) {
      return EMPTY_OBJECT_ARRAY;
    } else {
      final Object[] a = new Object[array.size()];
      for(int i = 0; i < array.size(); i++) {
        a[i] = toJsonMap(array.get(i));
      }
      return a;
    }
  }

  /** Convert LocatedBlocks to a Json string. */
  public static String toJsonString(final LocatedBlocks locatedblocks
      ) throws IOException {
    if (locatedblocks == null) {
      return null;
    }

    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("fileLength", locatedblocks.getFileLength());
    m.put("isUnderConstruction", locatedblocks.isUnderConstruction());

    m.put("locatedBlocks", toJsonArray(locatedblocks.getLocatedBlocks()));
    m.put("lastLocatedBlock", toJsonMap(locatedblocks.getLastLocatedBlock()));
    m.put("isLastBlockComplete", locatedblocks.isLastBlockComplete());
    return toJsonString(LocatedBlocks.class, m);
  }

  /** Convert a ContentSummary to a Json string. */
  public static String toJsonString(final ContentSummary contentsummary) {
    if (contentsummary == null) {
      return null;
    }

    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("length", contentsummary.getLength());
    m.put("fileCount", contentsummary.getFileCount());
    m.put("directoryCount", contentsummary.getDirectoryCount());
    m.put("quota", contentsummary.getQuota());
    m.put("spaceConsumed", contentsummary.getSpaceConsumed());
    m.put("spaceQuota", contentsummary.getSpaceQuota());
    // per-storage-type quotas are emitted only for types with a quota actually set
    final Map<String, Map<String, Long>> typeQuota =
        new TreeMap<String, Map<String, Long>>();
    for (StorageType t : StorageType.getTypesSupportingQuota()) {
      long tQuota = contentsummary.getTypeQuota(t);
      if (tQuota != HdfsConstants.QUOTA_RESET) {
        Map<String, Long> type = typeQuota.get(t.toString());
        if (type == null) {
          type = new TreeMap<String, Long>();
          typeQuota.put(t.toString(), type);
        }
        type.put("quota", contentsummary.getTypeQuota(t));
        type.put("consumed", contentsummary.getTypeConsumed(t));
      }
    }
    m.put("typeQuota", typeQuota);
    return toJsonString(ContentSummary.class, m);
  }

  /** Convert a MD5MD5CRC32FileChecksum to a Json string. */
  public static String toJsonString(final MD5MD5CRC32FileChecksum checksum) {
    if (checksum == null) {
      return null;
    }

    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("algorithm", checksum.getAlgorithmName());
    m.put("length", checksum.getLength());
    m.put("bytes", StringUtils.byteToHexString(checksum.getBytes()));
    return toJsonString(FileChecksum.class, m);
  }

  /** Convert a AclStatus object to a Json string. */
  public static String toJsonString(final AclStatus status) {
    if (status == null) {
      return null;
    }

    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("owner", status.getOwner());
    m.put("group", status.getGroup());
    m.put("stickyBit", status.isStickyBit());

    // ACL entries are serialized via their stable string form, not as objects
    final List<String> stringEntries = new ArrayList<>();
    for (AclEntry entry : status.getEntries()) {
      stringEntries.add(entry.toStringStable());
    }
    m.put("entries", stringEntries);

    FsPermission perm = status.getPermission();
    if (perm != null) {
      m.put("permission", toString(perm));
    }
    final Map<String, Map<String, Object>> finalMap =
        new TreeMap<String, Map<String, Object>>();
    finalMap.put(AclStatus.class.getSimpleName(), m);

    try {
      return MAPPER.writeValueAsString(finalMap);
    } catch (IOException ignored) {
    }
    return null;
  }

  // serializes one extended attribute; the value is encoded per the requested codec
  private static Map<String, Object> toJsonMap(final XAttr xAttr,
      final XAttrCodec encoding) throws IOException {
    if (xAttr == null) {
      return null;
    }

    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("name", XAttrHelper.getPrefixedName(xAttr));
    m.put("value", xAttr.getValue() != null ?
        XAttrCodec.encodeValue(xAttr.getValue(), encoding) : null);
    return m;
  }

  private static Object[] toJsonArray(final List<XAttr> array,
      final XAttrCodec encoding) throws IOException {
    if (array == null) {
      return null;
    } else if (array.size() == 0) {
      return EMPTY_OBJECT_ARRAY;
    } else {
      final Object[] a = new Object[array.size()];
      for(int i = 0; i < array.size(); i++) {
        a[i] = toJsonMap(array.get(i), encoding);
      }
      return a;
    }
  }

  /** Convert a list of XAttrs (names and encoded values) to a Json string. */
  public static String toJsonString(final List<XAttr> xAttrs,
      final XAttrCodec encoding) throws IOException {
    final Map<String, Object> finalMap = new TreeMap<String, Object>();
    finalMap.put("XAttrs", toJsonArray(xAttrs, encoding));
    return MAPPER.writeValueAsString(finalMap);
  }

  /**
   * Convert a list of XAttr names to a Json string.
   * NOTE(review): the names list is serialized to a JSON string first and then
   * embedded as a (string) value under "XAttrNames", so the final output contains
   * an escaped JSON string — this double encoding appears to be the WebHDFS wire
   * format for GETXATTRS/LISTXATTRS; confirm against the WebHDFS REST docs before
   * changing it.
   */
  public static String toJsonString(final List<XAttr> xAttrs)
      throws IOException {
    final List<String> names = Lists.newArrayListWithCapacity(xAttrs.size());
    for (XAttr xAttr : xAttrs) {
      names.add(XAttrHelper.getPrefixedName(xAttr));
    }
    String ret = MAPPER.writeValueAsString(names);
    final Map<String, Object> finalMap = new TreeMap<String, Object>();
    finalMap.put("XAttrNames", ret);
    return MAPPER.writeValueAsString(finalMap);
  }

  /** Serialize an arbitrary object with the shared ObjectMapper. */
  public static String toJsonString(Object obj) throws IOException {
    return MAPPER.writeValueAsString(obj);
  }

  /** Convert a BlockStoragePolicy[] to a Json string; null/empty maps to a null array. */
  public static String toJsonString(BlockStoragePolicy[] storagePolicies) {
    final Map<String, Object> blockStoragePolicies = new TreeMap<>();
    Object[] a = null;
    if (storagePolicies != null && storagePolicies.length > 0) {
      a = new Object[storagePolicies.length];
      for (int i = 0; i < storagePolicies.length; i++) {
        a[i] = toJsonMap(storagePolicies[i]);
      }
    }
    blockStoragePolicies.put("BlockStoragePolicy", a);
    return toJsonString("BlockStoragePolicies", blockStoragePolicies);
  }

  private static Object toJsonMap(BlockStoragePolicy blockStoragePolicy) {
    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("id", blockStoragePolicy.getId());
    m.put("name", blockStoragePolicy.getName());
    m.put("storageTypes", blockStoragePolicy.getStorageTypes());
    m.put("creationFallbacks", blockStoragePolicy.getCreationFallbacks());
    m.put("replicationFallbacks", blockStoragePolicy.getReplicationFallbacks());
    m.put("copyOnCreateFile", blockStoragePolicy.isCopyOnCreateFile());
    return m;
  }

  /** Convert a single BlockStoragePolicy to a Json string. */
  public static String toJsonString(BlockStoragePolicy storagePolicy) {
    return toJsonString(BlockStoragePolicy.class, toJsonMap(storagePolicy));
  }

  /** Convert FsServerDefaults to a Json string. */
  public static String toJsonString(FsServerDefaults serverDefaults) {
    return toJsonString(FsServerDefaults.class, toJsonMap(serverDefaults));
  }

  private static Object toJsonMap(FsServerDefaults serverDefaults) {
    final Map<String, Object> m = new HashMap<String, Object>();
    m.put("blockSize", serverDefaults.getBlockSize());
    m.put("bytesPerChecksum", serverDefaults.getBytesPerChecksum());
    m.put("writePacketSize", serverDefaults.getWritePacketSize());
    m.put("replication", serverDefaults.getReplication());
    m.put("fileBufferSize", serverDefaults.getFileBufferSize());
    m.put("encryptDataTransfer", serverDefaults.getEncryptDataTransfer());
    m.put("trashInterval", serverDefaults.getTrashInterval());
    m.put("checksumType", serverDefaults.getChecksumType().id);
    m.put("keyProviderUri", serverDefaults.getKeyProviderUri());
    m.put("defaultStoragePolicyId", serverDefaults.getDefaultStoragePolicyId());
    return m;
  }

  /** Convert a SnapshotDiffReport to a Json string. */
  public static String toJsonString(SnapshotDiffReport diffReport) {
    return toJsonString(SnapshotDiffReport.class.getSimpleName(),
        toJsonMap(diffReport));
  }

  private static Object toJsonMap(SnapshotDiffReport diffReport) {
    final Map<String, Object> m = new TreeMap<String, Object>();
    m.put("snapshotRoot", diffReport.getSnapshotRoot());
    m.put("fromSnapshot", diffReport.getFromSnapshot());
    m.put("toSnapshot", diffReport.getLaterSnapshotName());
    Object[] diffList = new Object[diffReport.getDiffList().size()];
    for (int i = 0; i < diffReport.getDiffList().size(); i++) {
      diffList[i] = toJsonMap(diffReport.getDiffList().get(i));
    }
    m.put("diffList", diffList);
    return m;
  }

  // NOTE(review): the chunk ends here mid-signature — the body of this method
  // continues outside the visible range.
  private static Object toJsonMap(
SnapshotDiffReport.DiffReportEntry diffReportEntry) { final Map<String, Object> m = new TreeMap<String, Object>(); m.put("type", diffReportEntry.getType()); if (diffReportEntry.getSourcePath() != null) { m.put("sourcePath", DFSUtilClient.bytes2String(diffReportEntry.getSourcePath())); } if (diffReportEntry.getTargetPath() != null) { m.put("targetPath", DFSUtilClient.bytes2String(diffReportEntry.getTargetPath())); } return m; } public static String toJsonString( SnapshottableDirectoryStatus[] snapshottableDirectoryList) { if (snapshottableDirectoryList == null) { return toJsonString("SnapshottableDirectoryList", null); } Object[] a = new Object[snapshottableDirectoryList.length]; for (int i = 0; i < snapshottableDirectoryList.length; i++) { a[i] = toJsonMap(snapshottableDirectoryList[i]); } return toJsonString("SnapshottableDirectoryList", a); } private static Object toJsonMap( SnapshottableDirectoryStatus snapshottableDirectoryStatus) { final Map<String, Object> m = new TreeMap<String, Object>(); m.put("snapshotNumber", snapshottableDirectoryStatus.getSnapshotNumber()); m.put("snapshotQuota", snapshottableDirectoryStatus.getSnapshotQuota()); m.put("parentFullPath", DFSUtilClient .bytes2String(snapshottableDirectoryStatus.getParentFullPath())); m.put("dirStatus", toJsonMap(snapshottableDirectoryStatus.getDirStatus())); return m; } }
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.util.io;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.text.StringUtil;
import org.jetbrains.annotations.NotNull;

import java.io.IOException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

/**
 * Base class for a pooled-thread reader that repeatedly drains a process
 * output stream. Supports two modes (see {@link SleepingPolicy}): a polling
 * (non-blocking) mode that sleeps between reads, and a BLOCKING mode whose
 * read loop exits only when the stream is closed.
 *
 * @author Konstantin Kolosovsky.
 */
public abstract class BaseDataReader {
  private static final Logger LOG = Logger.getInstance(BaseDataReader.class);

  protected final SleepingPolicy mySleepingPolicy;
  // Monitor used both to sleep between polls and to wake the reader early
  // (see resumeReading()/stop()).
  protected final Object mySleepMonitor = new Object();
  // Volatile: written from the thread calling stop(), read by the reader loop.
  protected volatile boolean isStopped;

  private Future<?> myFinishedFuture;

  public BaseDataReader(SleepingPolicy sleepingPolicy) {
    // Fall back to the SIMPLE polling policy when none is supplied.
    mySleepingPolicy = sleepingPolicy != null ? sleepingPolicy : SleepingPolicy.SIMPLE;
  }

  /** @deprecated use {@link #start(String)} instead (to be removed in IDEA 17) */
  @Deprecated
  protected void start() {
    start("");
  }

  /**
   * Submits the read loop to a pooled thread (idempotent: a second call is a
   * no-op while the first future exists). The presentable name is used to
   * temporarily rename the worker thread for diagnostics.
   */
  protected void start(@NotNull final String presentableName) {
    if (StringUtil.isEmptyOrSpaces(presentableName)) {
      // Warn (with a stack trace) but still proceed: the name is only cosmetic.
      LOG.warn(new Throwable("Must provide not-empty presentable name"));
    }

    if (myFinishedFuture == null) {
      myFinishedFuture = executeOnPooledThread(new Runnable() {
        @Override
        public void run() {
          String oldThreadName = Thread.currentThread().getName();
          if (!StringUtil.isEmptyOrSpaces(presentableName)) {
            Thread.currentThread().setName("BaseDataReader: " + presentableName);
          }
          try {
            doRun();
          }
          finally {
            // Restore the pooled thread's original name for reuse.
            Thread.currentThread().setName(oldThreadName);
          }
        }
      });
    }
  }

  /**
   * Please don't override this method as the BaseOSProcessHandler assumes that there can be two reading modes:
   * blocking and non-blocking. Implement {@link #readAvailableBlocking} and {@link #readAvailableNonBlocking} instead.
   *
   * If the process handler assumes that the reader handles the blocking mode, while it doesn't,
   * it will result in a premature stream close.
   *
   * @return true in case any data was read
   * @throws IOException if an exception during IO happened
   */
  protected boolean readAvailable() throws IOException {
    return mySleepingPolicy == SleepingPolicy.BLOCKING ? readAvailableBlocking() : readAvailableNonBlocking();
  }

  /**
   * Non-blocking read: returns control back to the process handler when there is no data to read.
   * Default implementation is unsupported; subclasses using a polling policy must override.
   */
  protected boolean readAvailableNonBlocking() throws IOException {
    throw new UnsupportedOperationException();
  }

  /**
   * Blocking read: blocks on the IO read operation until data is received, and returns
   * only after the stream is closed. Default implementation is unsupported; subclasses
   * using the BLOCKING policy must override.
   */
  protected boolean readAvailableBlocking() throws IOException {
    throw new UnsupportedOperationException();
  }

  @NotNull
  protected abstract Future<?> executeOnPooledThread(@NotNull Runnable runnable);

  /**
   * Strategy deciding how long to sleep between polls. Times are in
   * milliseconds (passed to Object.wait below).
   */
  public interface SleepingPolicy {
    int sleepTimeWhenWasActive = 1;
    int sleepTimeWhenIdle = 5;

    // Fixed short sleeps: 1 ms after a productive read, 5 ms when idle.
    SleepingPolicy SIMPLE = new SleepingPolicy() {
      @Override
      public int getTimeToSleep(boolean wasActive) {
        return wasActive ? sleepTimeWhenWasActive : sleepTimeWhenIdle;
      }
    };

    SleepingPolicy BLOCKING = new SleepingPolicy() {
      @Override
      public int getTimeToSleep(boolean wasActive) {
        // in blocking mode we need to sleep only when we have reached end of the stream
        // so it can be a long sleeping
        return 50;
      }
    };

    int getTimeToSleep(boolean wasActive);
  }

  /**
   * Polling policy that backs off: idle sleep time doubles every
   * maxIterationsWithCurrentSleepTime idle iterations, capped at
   * maxSleepTimeWhenIdle, and resets to the minimum on activity.
   */
  public static class AdaptiveSleepingPolicy implements SleepingPolicy {
    private static final int maxSleepTimeWhenIdle = 200;
    private static final int maxIterationsWithCurrentSleepTime = 50;

    private volatile int myIterationsWithCurrentTime;
    private volatile int myCurrentSleepTime = sleepTimeWhenIdle;

    @Override
    public int getTimeToSleep(boolean wasActive) {
      int currentSleepTime = myCurrentSleepTime; // volatile read
      if (wasActive) currentSleepTime = sleepTimeWhenWasActive;
      else if (currentSleepTime == sleepTimeWhenWasActive) {
        // First idle iteration after activity: restart backoff at the base idle time.
        currentSleepTime = sleepTimeWhenIdle;
        myIterationsWithCurrentTime = 0;
      } else {
        int iterationsWithCurrentTime = ++myIterationsWithCurrentTime;
        if (iterationsWithCurrentTime >= maxIterationsWithCurrentSleepTime) {
          myIterationsWithCurrentTime = 0;
          currentSleepTime = Math.min(2 * currentSleepTime, maxSleepTimeWhenIdle);
        }
      }

      myCurrentSleepTime = currentSleepTime; // volatile write
      return currentSleepTime;
    }
  }

  /**
   * The read loop. Reads until stopped (or, in BLOCKING mode, until
   * readAvailable returns after the stream closes), then always closes the
   * stream. Note the one-extra-iteration design: after isStopped is observed,
   * one more readAvailable() runs before breaking, to drain output produced
   * just before the stop request.
   */
  protected void doRun() {
    try {
      boolean stopSignalled = false;
      while (true) {
        final boolean read = readAvailable();

        // In blocking mode a returning readAvailable() means the stream is
        // done, so exit immediately; otherwise exit only one iteration after
        // the stop signal was seen.
        if (stopSignalled || mySleepingPolicy == SleepingPolicy.BLOCKING) {
          break;
        }

        stopSignalled = isStopped;

        if (!stopSignalled) {
          // if the process stopped there is no sense in sleeping, just check
          // whether there is unread output left in the stream
          synchronized (mySleepMonitor) {
            // wait() rather than sleep() so resumeReading() can interrupt the pause.
            mySleepMonitor.wait(mySleepingPolicy.getTimeToSleep(read));
          }
        }
      }
    }
    catch (IOException e) {
      LOG.info(e);
    }
    catch (Exception e) {
      LOG.error(e);
    }
    finally {
      try {
        close();
      }
      catch (IOException e) {
        LOG.error("Can't close stream", e);
      }
    }
  }

  // Wake the reader from its inter-poll sleep (used by stop()).
  private void resumeReading() {
    synchronized (mySleepMonitor) {
      mySleepMonitor.notifyAll();
    }
  }

  /** Closes the underlying stream; called exactly once from doRun's finally. */
  protected abstract void close() throws IOException;

  /** Requests the loop to finish and wakes it so it notices promptly. */
  public void stop() {
    isStopped = true;
    resumeReading();
  }

  // NOTE(review): both waitFor variants log and swallow ExecutionException,
  // so a reader that died with an error still "completes" from the caller's
  // point of view.
  public void waitFor() throws InterruptedException {
    try {
      myFinishedFuture.get();
    }
    catch (ExecutionException e) {
      LOG.error(e);
    }
  }

  public void waitFor(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException {
    try {
      myFinishedFuture.get(timeout, unit);
    }
    catch (ExecutionException e) {
      LOG.error(e);
    }
  }
}
/*
 * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.spi.impl.operationservice.impl;

import com.hazelcast.core.ExecutionCallback;
import com.hazelcast.instance.MemberImpl;
import com.hazelcast.instance.Node;
import com.hazelcast.internal.cluster.ClusterClock;
import com.hazelcast.internal.management.dto.SlowOperationDTO;
import com.hazelcast.internal.metrics.MetricsProvider;
import com.hazelcast.internal.metrics.MetricsRegistry;
import com.hazelcast.internal.metrics.Probe;
import com.hazelcast.internal.partition.InternalPartitionService;
import com.hazelcast.internal.serialization.InternalSerializationService;
import com.hazelcast.internal.util.counters.Counter;
import com.hazelcast.internal.util.counters.MwCounter;
import com.hazelcast.logging.ILogger;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.Packet;
import com.hazelcast.spi.ExecutionService;
import com.hazelcast.spi.InternalCompletableFuture;
import com.hazelcast.spi.InvocationBuilder;
import com.hazelcast.spi.LiveOperations;
import com.hazelcast.spi.LiveOperationsTracker;
import com.hazelcast.spi.Operation;
import com.hazelcast.spi.OperationFactory;
import com.hazelcast.spi.OperationService;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.impl.PacketHandler;
import com.hazelcast.spi.impl.PartitionSpecificRunnable;
import com.hazelcast.spi.impl.operationexecutor.OperationExecutor;
import com.hazelcast.spi.impl.operationexecutor.impl.OperationExecutorImpl;
import com.hazelcast.spi.impl.operationexecutor.slowoperationdetector.SlowOperationDetector;
import com.hazelcast.spi.impl.operationservice.InternalOperationService;
import com.hazelcast.util.EmptyStatement;
import com.hazelcast.util.executor.ExecutorType;
import com.hazelcast.util.executor.ManagedExecutorService;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import static com.hazelcast.internal.metrics.ProbeLevel.MANDATORY;
import static com.hazelcast.internal.util.counters.MwCounter.newMwCounter;
import static com.hazelcast.spi.InvocationBuilder.DEFAULT_CALL_TIMEOUT;
import static com.hazelcast.spi.InvocationBuilder.DEFAULT_DESERIALIZE_RESULT;
import static com.hazelcast.spi.InvocationBuilder.DEFAULT_REPLICA_INDEX;
import static com.hazelcast.spi.InvocationBuilder.DEFAULT_TRY_COUNT;
import static com.hazelcast.spi.InvocationBuilder.DEFAULT_TRY_PAUSE_MILLIS;
import static com.hazelcast.spi.impl.operationutil.Operations.isJoinOperation;
import static com.hazelcast.spi.properties.GroupProperty.OPERATION_CALL_TIMEOUT_MILLIS;
import static com.hazelcast.util.CollectionUtil.toIntegerList;
import static com.hazelcast.util.Preconditions.checkNotNegative;
import static com.hazelcast.util.Preconditions.checkNotNull;
import static java.util.Collections.newSetFromMap;
import static java.util.concurrent.TimeUnit.SECONDS;

/**
 * This is the implementation of the {@link com.hazelcast.spi.impl.operationservice.InternalOperationService}.
 * <p/>
 * <h1>System Operation</h1>
 * When a {@link com.hazelcast.spi.UrgentSystemOperation} is invoked on this OperationService, it will be executed with
 * high urgency by making use of an urgent queue. So when the system is under load, and the operation queues are
 * filled, then system operations are executed before normal operations. The advantage is that when a system is under
 * pressure, it still is able to do things like recognizing new members in the cluster and moving partitions around.
 * <p/>
 * When a UrgentSystemOperation is sent to a remote machine, it is wrapped in a {@link Packet} and the packet is marked
 * as an urgent packet. When this packet is received on the remote OperationService, the urgent flag is checked and if
 * needed, the operation is put on the urgent queue. So local and remote execution of System operations will obey
 * the urgency.
 *
 * @see Invocation
 * @see InvocationBuilderImpl
 * @see PartitionInvocation
 * @see TargetInvocation
 */
public final class OperationServiceImpl implements InternalOperationService, MetricsProvider, LiveOperationsTracker {

    private static final int ASYNC_QUEUE_CAPACITY = 100000;
    private static final long TERMINATION_TIMEOUT_MILLIS = SECONDS.toMillis(10);

    // Package-private fields are accessed directly by collaborating classes in
    // this package (Invocation, InvocationMonitor, ...).
    final InvocationRegistry invocationRegistry;
    final OperationExecutor operationExecutor;

    @Probe(name = "operationTimeoutCount", level = MANDATORY)
    final MwCounter operationTimeoutCount = newMwCounter();

    @Probe(name = "callTimeoutCount", level = MANDATORY)
    final MwCounter callTimeoutCount = newMwCounter();

    @Probe(name = "retryCount", level = MANDATORY)
    final MwCounter retryCount = newMwCounter();

    @Probe(name = "failedBackups", level = MANDATORY)
    final Counter failedBackupsCount = newMwCounter();

    final NodeEngineImpl nodeEngine;
    final Node node;
    final ILogger logger;
    final OperationBackupHandler backupHandler;
    final BackpressureRegulator backpressureRegulator;
    final OutboundResponseHandler outboundResponseHandler;
    final OutboundOperationHandler outboundOperationHandler;

    // Published (volatile) because it is assigned late, in start(), after the
    // execution service is available; invocations read it afterwards.
    volatile Invocation.Context invocationContext;

    private final InvocationMonitor invocationMonitor;
    private final SlowOperationDetector slowOperationDetector;
    private final AsyncInboundResponseHandler asyncInboundResponseHandler;
    private final InternalSerializationService serializationService;
    private final InboundResponseHandler inboundResponseHandler;
    private final Address thisAddress;

    // contains the current executing asyncOperations. This information is needed for the operation-ping.
    // this is a temporary solution till we found a better async operation abstraction
    @Probe
    private final Set<Operation> asyncOperations = newSetFromMap(new ConcurrentHashMap<Operation, Boolean>());

    // Constructor wires the invocation pipeline. NOTE: construction order
    // matters — e.g. the backpressureRegulator must exist before the
    // invocationRegistry (call-id sequence), and the outbound handlers before
    // the backupHandler.
    public OperationServiceImpl(NodeEngineImpl nodeEngine) {
        this.nodeEngine = nodeEngine;
        this.node = nodeEngine.getNode();
        this.thisAddress = node.getThisAddress();
        this.logger = node.getLogger(OperationService.class);
        this.serializationService = (InternalSerializationService) nodeEngine.getSerializationService();

        this.backpressureRegulator = new BackpressureRegulator(
                node.getProperties(), node.getLogger(BackpressureRegulator.class));

        this.outboundResponseHandler = new OutboundResponseHandler(
                thisAddress, serializationService, node, node.getLogger(OutboundResponseHandler.class));

        this.invocationRegistry = new InvocationRegistry(
                node.getLogger(OperationServiceImpl.class), backpressureRegulator.newCallIdSequence());

        this.invocationMonitor = new InvocationMonitor(
                nodeEngine, thisAddress, node.getProperties(), invocationRegistry,
                node.getLogger(InvocationMonitor.class), serializationService, nodeEngine.getServiceManager());

        this.outboundOperationHandler = new OutboundOperationHandler(node, thisAddress, serializationService);

        this.backupHandler = new OperationBackupHandler(this, outboundOperationHandler);

        String hzName = nodeEngine.getHazelcastInstance().getName();
        this.inboundResponseHandler = new InboundResponseHandler(
                node.getLogger(InboundResponseHandler.class), node.getSerializationService(),
                invocationRegistry, nodeEngine);
        ClassLoader configClassLoader = node.getConfigClassLoader();
        this.asyncInboundResponseHandler = new AsyncInboundResponseHandler(configClassLoader, hzName,
                node.getLogger(AsyncInboundResponseHandler.class), inboundResponseHandler, node.getProperties());

        this.operationExecutor = new OperationExecutorImpl(
                node.getProperties(), node.loggingService, thisAddress, new OperationRunnerFactoryImpl(this),
                node.getNodeExtension(), hzName, configClassLoader);

        this.slowOperationDetector = new SlowOperationDetector(node.loggingService,
                operationExecutor.getGenericOperationRunners(), operationExecutor.getPartitionOperationRunners(),
                node.getProperties(), hzName);
    }

    public OutboundResponseHandler getOutboundResponseHandler() {
        return outboundResponseHandler;
    }

    public PacketHandler getAsyncInboundResponseHandler() {
        return asyncInboundResponseHandler;
    }

    public InvocationMonitor getInvocationMonitor() {
        return invocationMonitor;
    }

    @Override
    public List<SlowOperationDTO> getSlowOperationDTOs() {
        return slowOperationDetector.getSlowOperationDTOs();
    }

    public InvocationRegistry getInvocationRegistry() {
        return invocationRegistry;
    }

    public InboundResponseHandler getInboundResponseHandler() {
        return inboundResponseHandler;
    }

    @Override
    public int getPartitionThreadCount() {
        return operationExecutor.getPartitionThreadCount();
    }

    @Override
    public int getGenericThreadCount() {
        return operationExecutor.getGenericThreadCount();
    }

    @Override
    public int getRunningOperationsCount() {
        return operationExecutor.getRunningOperationCount();
    }

    @Override
    public long getExecutedOperationCount() {
        return operationExecutor.getExecutedOperationCount();
    }

    @Override
    public int getRemoteOperationsCount() {
        return invocationRegistry.size();
    }

    @Override
    public int getOperationExecutorQueueSize() {
        return operationExecutor.getQueueSize();
    }

    @Override
    public int getPriorityOperationExecutorQueueSize() {
        return operationExecutor.getPriorityQueueSize();
    }

    public OperationExecutor getOperationExecutor() {
        return operationExecutor;
    }

    @Override
    public int getResponseQueueSize() {
        return asyncInboundResponseHandler.getQueueSize();
    }

    // Reports currently running operations (for operation heartbeats/pings):
    // both executor-scheduled ones and the tracked async operations.
    @Override
    public void populate(LiveOperations liveOperations) {
        operationExecutor.scan(liveOperations);
        for (Operation op : asyncOperations) {
            liveOperations.add(op.getCallerAddress(), op.getCallId());
        }
    }

    @Override
    public void execute(PartitionSpecificRunnable task) {
        operationExecutor.execute(task);
    }

    @Override
    public InvocationBuilder createInvocationBuilder(String serviceName, Operation op, int partitionId) {
        checkNotNegative(partitionId, "Partition ID cannot be negative!");
        return new InvocationBuilderImpl(invocationContext, serviceName, op, partitionId);
    }

    @Override
    public InvocationBuilder createInvocationBuilder(String serviceName, Operation op, Address target) {
        checkNotNull(target, "Target cannot be null!");
        return new InvocationBuilderImpl(invocationContext, serviceName, op, target);
    }

    @Override
    public void run(Operation op) {
        operationExecutor.run(op);
    }

    @Override
    public void execute(Operation op) {
        operationExecutor.execute(op);
    }

    @Override
    public boolean isRunAllowed(Operation op) {
        return operationExecutor.isRunAllowed(op);
    }

    @Override
    @SuppressWarnings("unchecked")
    public <E> InternalCompletableFuture<E> invokeOnPartition(String serviceName, Operation op, int partitionId) {
        op.setServiceName(serviceName)
                .setPartitionId(partitionId)
                .setReplicaIndex(DEFAULT_REPLICA_INDEX);

        return new PartitionInvocation(
                invocationContext, op, DEFAULT_TRY_COUNT, DEFAULT_TRY_PAUSE_MILLIS,
                DEFAULT_CALL_TIMEOUT, DEFAULT_DESERIALIZE_RESULT).invoke();
    }

    // Variant that assumes the operation already carries its service name,
    // partition id and replica index.
    @Override
    @SuppressWarnings("unchecked")
    public <E> InternalCompletableFuture<E> invokeOnPartition(Operation op) {
        return new PartitionInvocation(
                invocationContext, op, DEFAULT_TRY_COUNT, DEFAULT_TRY_PAUSE_MILLIS,
                DEFAULT_CALL_TIMEOUT, DEFAULT_DESERIALIZE_RESULT).invoke();
    }

    @Override
    @SuppressWarnings("unchecked")
    public <E> InternalCompletableFuture<E> invokeOnTarget(String serviceName, Operation op, Address target) {
        op.setServiceName(serviceName);

        return new TargetInvocation(invocationContext, op, target, DEFAULT_TRY_COUNT, DEFAULT_TRY_PAUSE_MILLIS,
                DEFAULT_CALL_TIMEOUT, DEFAULT_DESERIALIZE_RESULT).invoke();
    }

    @Override
    @SuppressWarnings("unchecked")
    public <V> void asyncInvokeOnPartition(String serviceName, Operation op, int partitionId,
                                           ExecutionCallback<V> callback) {
        op.setServiceName(serviceName).setPartitionId(partitionId).setReplicaIndex(DEFAULT_REPLICA_INDEX);

        InvocationFuture future = new PartitionInvocation(invocationContext, op, DEFAULT_TRY_COUNT,
                DEFAULT_TRY_PAUSE_MILLIS, DEFAULT_CALL_TIMEOUT, DEFAULT_DESERIALIZE_RESULT).invokeAsync();

        if (callback != null) {
            future.andThen(callback);
        }
    }

    // Bracketing calls for async operations so populate() can report them
    // while they are in flight.
    public void onStartAsyncOperation(Operation op) {
        asyncOperations.add(op);
    }

    public void onCompletionAsyncOperation(Operation op) {
        asyncOperations.remove(op);
    }

    // =============================== processing operation  ===============================

    @Override
    public boolean isCallTimedOut(Operation op) {
        // Join operations should not be checked for timeout because caller is not member of this cluster
        // and can have a different clock.
        if (isJoinOperation(op)) {
            return false;
        }

        long callTimeout = op.getCallTimeout();
        long invocationTime = op.getInvocationTime();
        long expireTime = invocationTime + callTimeout;

        // expireTime <= 0 covers overflow/unset invocation data; MAX_VALUE
        // means "no timeout".
        if (expireTime <= 0 || expireTime == Long.MAX_VALUE) {
            return false;
        }

        // Compared against the cluster clock, not the local clock, so all
        // members agree on expiry.
        ClusterClock clusterClock = nodeEngine.getClusterService().getClusterClock();
        long now = clusterClock.getClusterTime();
        if (expireTime < now) {
            return true;
        }

        return false;
    }

    @Override
    public Map<Integer, Object> invokeOnAllPartitions(String serviceName, OperationFactory operationFactory)
            throws Exception {
        Map<Address, List<Integer>> memberPartitions = nodeEngine.getPartitionService().getMemberPartitionsMap();
        InvokeOnPartitions invokeOnPartitions = new InvokeOnPartitions(this, serviceName, operationFactory,
                memberPartitions);
        return invokeOnPartitions.invoke();
    }

    @Override
    public Map<Integer, Object> invokeOnPartitions(String serviceName, OperationFactory operationFactory,
                                                   Collection<Integer> partitions) throws Exception {
        // Group the requested partitions by their current owner so one
        // batched invocation per member suffices.
        Map<Address, List<Integer>> memberPartitions = new HashMap<Address, List<Integer>>(3);
        InternalPartitionService partitionService = nodeEngine.getPartitionService();
        for (int partition : partitions) {
            Address owner = partitionService.getPartitionOwnerOrWait(partition);

            if (!memberPartitions.containsKey(owner)) {
                memberPartitions.put(owner, new ArrayList<Integer>());
            }

            memberPartitions.get(owner).add(partition);
        }
        InvokeOnPartitions invokeOnPartitions = new InvokeOnPartitions(this, serviceName, operationFactory,
                memberPartitions);
        return invokeOnPartitions.invoke();
    }

    @Override
    public Map<Integer, Object> invokeOnPartitions(String serviceName, OperationFactory operationFactory,
                                                   int[] partitions) throws Exception {
        return invokeOnPartitions(serviceName, operationFactory, toIntegerList(partitions));
    }

    @Override
    public boolean send(Operation op, Address target) {
        return outboundOperationHandler.send(op, target);
    }

    public void onMemberLeft(MemberImpl member) {
        invocationMonitor.onMemberLeft(member);
    }

    public void reset() {
        invocationRegistry.reset();
    }

    @Override
    public void provideMetrics(MetricsRegistry registry) {
        registry.scanAndRegister(this, "operation");

        registry.collectMetrics(invocationRegistry, invocationMonitor, inboundResponseHandler,
                asyncInboundResponseHandler, operationExecutor);
    }

    // Starts the sub-components; the invocation context must be built first
    // because invocations may arrive as soon as the executor starts.
    public void start() {
        logger.finest("Starting OperationService");

        initInvocationContext();

        invocationMonitor.start();
        operationExecutor.start();
        asyncInboundResponseHandler.start();
        slowOperationDetector.start();
    }

    private void initInvocationContext() {
        ManagedExecutorService asyncExecutor = nodeEngine.getExecutionService().register(
                ExecutionService.ASYNC_EXECUTOR, Runtime.getRuntime().availableProcessors(),
                ASYNC_QUEUE_CAPACITY, ExecutorType.CONCRETE);

        this.invocationContext = new Invocation.Context(
                asyncExecutor,
                nodeEngine.getClusterService().getClusterClock(),
                nodeEngine.getClusterService(),
                node.connectionManager,
                node.nodeEngine.getExecutionService(),
                nodeEngine.getProperties().getMillis(OPERATION_CALL_TIMEOUT_MILLIS),
                invocationRegistry,
                invocationMonitor,
                nodeEngine.getLogger(Invocation.class),
                node,
                nodeEngine,
                nodeEngine.getPartitionService(),
                this,
                operationExecutor,
                retryCount,
                serializationService,
                nodeEngine.getThisAddress(),
                outboundOperationHandler);
    }

    /**
     * Shuts down invocation infrastructure.
     * New invocation requests will be rejected after shutdown and all pending invocations
     * will be notified with a failure response.
     */
    public void shutdownInvocations() {
        logger.finest("Shutting down invocations");

        invocationRegistry.shutdown();
        invocationMonitor.shutdown();
        asyncInboundResponseHandler.shutdown();

        try {
            invocationMonitor.awaitTermination(TERMINATION_TIMEOUT_MILLIS);
        } catch (InterruptedException e) {
            //restore the interrupt.
            //todo: we need a better mechanism for dealing with interruption and waiting for termination
            Thread.currentThread().interrupt();
            EmptyStatement.ignore(e);
        }
    }

    public void shutdownOperationExecutor() {
        logger.finest("Shutting down operation executors");

        operationExecutor.shutdown();
        slowOperationDetector.shutdown();
    }
}
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2014 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.authentication; import java.awt.BorderLayout; import java.awt.Component; import java.awt.GridBagLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.script.ScriptException; import javax.swing.DefaultComboBoxModel; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JComboBox; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.border.Border; import javax.swing.border.EmptyBorder; import javax.swing.plaf.basic.BasicComboBoxRenderer; import net.sf.json.JSONObject; import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.lang.StringEscapeUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.control.Control; import org.parosproxy.paros.db.DatabaseException; import 
org.parosproxy.paros.db.RecordContext;
import org.parosproxy.paros.extension.ExtensionHook;
import org.parosproxy.paros.model.Model;
import org.parosproxy.paros.model.Session;
import org.parosproxy.paros.network.HttpMessage;
import org.parosproxy.paros.network.HttpSender;
import org.parosproxy.paros.view.View;
import org.zaproxy.zap.ZAP;
import org.zaproxy.zap.authentication.GenericAuthenticationCredentials.GenericAuthenticationCredentialsOptionsPanel;
import org.zaproxy.zap.extension.api.ApiDynamicActionImplementor;
import org.zaproxy.zap.extension.api.ApiException;
import org.zaproxy.zap.extension.api.ApiResponse;
import org.zaproxy.zap.extension.authentication.AuthenticationAPI;
import org.zaproxy.zap.extension.script.ExtensionScript;
import org.zaproxy.zap.extension.script.ScriptType;
import org.zaproxy.zap.extension.script.ScriptWrapper;
import org.zaproxy.zap.model.Context;
import org.zaproxy.zap.session.SessionManagementMethod;
import org.zaproxy.zap.session.WebSession;
import org.zaproxy.zap.users.User;
import org.zaproxy.zap.utils.ApiUtils;
import org.zaproxy.zap.utils.EncodingUtils;
import org.zaproxy.zap.view.DynamicFieldsPanel;
import org.zaproxy.zap.view.LayoutHelper;

/**
 * An {@link AuthenticationMethodType} whose authentication logic is delegated to a
 * user-provided script (a script of type {@link #SCRIPT_TYPE_AUTH}).
 */
public class ScriptBasedAuthenticationMethodType extends AuthenticationMethodType {

    // Keys under which the script name and its parameter values are stored in a
    // context configuration.
    public static final String CONTEXT_CONFIG_AUTH_SCRIPT =
            AuthenticationMethod.CONTEXT_CONFIG_AUTH + ".script";
    public static final String CONTEXT_CONFIG_AUTH_SCRIPT_NAME =
            CONTEXT_CONFIG_AUTH_SCRIPT + ".name";
    public static final String CONTEXT_CONFIG_AUTH_SCRIPT_PARAMS =
            CONTEXT_CONFIG_AUTH_SCRIPT + ".params";

    // Unique id of this authentication method type (persisted with sessions/contexts).
    public static final int METHOD_IDENTIFIER = 4;

    private static final Logger log =
            LogManager.getLogger(ScriptBasedAuthenticationMethodType.class);

    /** The Constant SCRIPT_TYPE_AUTH. */
    public static final String SCRIPT_TYPE_AUTH = "authentication";

    // Name of this method as exposed through the ZAP API.
    private static final String API_METHOD_NAME = "scriptBasedAuthentication";

    /** The SCRIPT ICON. */
    private static final ImageIcon SCRIPT_ICON_AUTH =
            new ImageIcon(ZAP.class.getResource("/resource/icon/16/script-auth.png"));

    /** The Authentication method's name. */
    private static final String METHOD_NAME =
            Constant.messages.getString("authentication.method.script.name");

    // Lazily resolved in getScriptsExtension().
    private ExtensionScript extensionScript;

    /** The authentication method instance bound to a single context/configuration. */
    public class ScriptBasedAuthenticationMethod extends AuthenticationMethod {

        // The currently loaded authentication script.
        private ScriptWrapper script;

        // Names of the credential parameters declared by the script.
        private String[] credentialsParamNames;

        // Values of the (required + optional) configuration parameters of the script.
        private Map<String, String> paramValues;

        private HttpSender httpSender;

        // Lazily creates the sender used by the script to issue authentication requests.
        protected HttpSender getHttpSender() {
            if (this.httpSender == null) {
                this.httpSender =
                        new HttpSender(
                                Model.getSingleton().getOptionsParam().getConnectionParam(),
                                true,
                                HttpSender.AUTHENTICATION_INITIATOR);
            }
            return httpSender;
        }

        /**
         * Loads a script and fills in the method's fields according to the values specified
         * by the script.
         *
         * <p>If the method already had a loaded script and a set of values for the parameters, it
         * tries to provide new values for the new parameters if they match any previous parameter
         * names.
         *
         * @param scriptW the script wrapper
         * @throws IllegalArgumentException if an error occurs while loading the script.
*/
public void loadScript(ScriptWrapper scriptW) {
    AuthenticationScript script = getScriptInterfaceV2(scriptW);
    if (script == null) {
        script = getScriptInterface(scriptW);
    }

    if (script == null) {
        log.warn(
                "The script "
                        + scriptW.getName()
                        + " does not properly implement the Authentication Script interface.");
        throw new IllegalArgumentException(
                Constant.messages.getString(
                        "authentication.method.script.dialog.error.text.interface",
                        scriptW.getName()));
    }

    try {
        // V2 scripts embed the logged in/out indicators; adopt them directly.
        if (script instanceof AuthenticationScriptV2) {
            AuthenticationScriptV2 scriptV2 = (AuthenticationScriptV2) script;
            setLoggedInIndicatorPattern(scriptV2.getLoggedInIndicator());
            setLoggedOutIndicatorPattern(scriptV2.getLoggedOutIndicator());
        }
        String[] requiredParams = script.getRequiredParamsNames();
        String[] optionalParams = script.getOptionalParamsNames();
        this.credentialsParamNames = script.getCredentialsParamsNames();
        if (log.isDebugEnabled()) {
            log.debug(
                    "Loaded authentication script - required parameters: "
                            + Arrays.toString(requiredParams)
                            + " - optional parameters: "
                            + Arrays.toString(optionalParams));
        }
        // If there's an already loaded script, make sure we save its values and _try_
        // to use them
        Map<String, String> oldValues =
                this.paramValues != null
                        ? this.paramValues
                        : Collections.<String, String>emptyMap();
        this.paramValues = new HashMap<>(requiredParams.length + optionalParams.length);
        for (String param : requiredParams) {
            this.paramValues.put(param, oldValues.get(param));
        }
        for (String param : optionalParams) {
            this.paramValues.put(param, oldValues.get(param));
        }

        this.script = scriptW;
        log.info("Successfully loaded new script for ScriptBasedAuthentication: " + this);
    } catch (Exception e) {
        log.error("Error while loading authentication script", e);
        getScriptsExtension().handleScriptException(this.script, e);
        throw new IllegalArgumentException(
                Constant.messages.getString(
                        "authentication.method.script.dialog.error.text.loading",
                        e.getMessage()));
    }
}

@Override
public String toString() {
    return "ScriptBasedAuthenticationMethod [script="
            + script
            + ", paramValues="
            + paramValues
            + ", credentialsParamNames="
            + Arrays.toString(credentialsParamNames)
            + "]";
}

// Always reported as configured; actual validation happens when the options panel saves.
@Override
public boolean isConfigured() {
    return true;
}

@Override
protected AuthenticationMethod duplicate() {
    ScriptBasedAuthenticationMethod method = new ScriptBasedAuthenticationMethod();
    method.script = script;
    // Copy the parameter map so the duplicate can be edited independently.
    method.paramValues = this.paramValues != null ? new HashMap<>(this.paramValues) : null;
    method.credentialsParamNames = this.credentialsParamNames;
    return method;
}

@Override
public boolean validateCreationOfAuthenticationCredentials() {
    if (credentialsParamNames != null) {
        return true;
    }

    // No script loaded yet, so the credential parameter names are unknown.
    if (View.isInitialised()) {
        View.getSingleton()
                .showMessageDialog(
                        Constant.messages.getString(
                                "authentication.method.script.dialog.error.text.notLoaded"));
    }

    return false;
}

@Override
public AuthenticationCredentials createAuthenticationCredentials() {
    return new GenericAuthenticationCredentials(this.credentialsParamNames);
}

@Override
public AuthenticationMethodType getType() {
    return new ScriptBasedAuthenticationMethodType();
}

/**
 * Runs the loaded authentication script and extracts the web session from the message it
 * returns. On any failure the user's last-auth-failure state is set and {@code null} is
 * returned.
 */
@Override
public WebSession authenticate(
        SessionManagementMethod sessionManagementMethod,
        AuthenticationCredentials credentials,
        User user)
        throws UnsupportedAuthenticationCredentialsException {
    // type check
    if (!(credentials instanceof GenericAuthenticationCredentials)) {
        user.getAuthenticationState()
                .setLastAuthFailure("Credentials not GenericAuthenticationCredentials");
        throw new UnsupportedAuthenticationCredentialsException(
                "Script based Authentication method only supports "
                        + GenericAuthenticationCredentials.class.getSimpleName()
                        + ". Received: "
                        + credentials.getClass());
    }
    GenericAuthenticationCredentials cred = (GenericAuthenticationCredentials) credentials;

    // Call the script to get an authenticated message from which we can then extract the
    // session
    AuthenticationScript script = getScriptInterfaceV2(this.script);
    if (script == null) {
        script = getScriptInterface(this.script);
    }
    if (script == null) {
        return null;
    }
    ExtensionScript.recordScriptCalledStats(this.script);

    HttpMessage msg = null;
    try {
        if (script instanceof AuthenticationScriptV2) {
            AuthenticationScriptV2 scriptV2 = (AuthenticationScriptV2) script;
            setLoggedInIndicatorPattern(scriptV2.getLoggedInIndicator());
            setLoggedOutIndicatorPattern(scriptV2.getLoggedOutIndicator());
        }
        msg =
                script.authenticate(
                        new AuthenticationHelper(
                                getHttpSender(), sessionManagementMethod, user),
                        this.paramValues,
                        cred);
    } catch (Exception e) {
        // Catch Exception instead of ScriptException and IOException because script engine
        // implementations
        // might throw other exceptions on script errors (e.g.
        // jdk.nashorn.internal.runtime.ECMAException)
        user.getAuthenticationState()
                .setLastAuthFailure(
                        "Error running authentication script " + e.getMessage());
        log.error(
                "An error occurred while trying to authenticate using the Authentication Script: "
                        + this.script.getName(),
                e);
        getScriptsExtension().handleScriptException(this.script, e);
        return null;
    }

    // FIX(review): user-written scripts may return null instead of throwing; previously
    // this fell through to msg.getRequestHeader() and raised a NullPointerException out of
    // authenticate(). Fail the same way the other error paths do.
    if (msg == null) {
        String error =
                String.format(
                        "The script '%s' did not return an authentication message.",
                        this.script.getName());
        user.getAuthenticationState().setLastAuthFailure(error);
        log.error(error);
        return null;
    }

    if (msg.getRequestHeader().getURI() == null) {
        String error =
                String.format(
                        "Auth request returned by the script '%s' does not have the request-target.",
                        this.script.getName());
        user.getAuthenticationState().setLastAuthFailure(error);
        log.error(error);
        error = "ERROR: " + error + "\n";
        getScriptsExtension().handleScriptError(this.script, error);
        if (View.isInitialised()) {
            View.getSingleton().getOutputPanel().appendAsync(error);
        }
        return null;
    }

    if (this.isAuthenticated(msg, user, true)) {
        // Let the user know it worked
        user.getAuthenticationState().setLastAuthFailure("");
        AuthenticationHelper.notifyOutputAuthSuccessful(msg);
    } else {
        // Let the user know it failed
        user.getAuthenticationState().setLastAuthFailure("User is not authenticated");
        AuthenticationHelper.notifyOutputAuthFailure(msg);
    }

    // Add message to history
    AuthenticationHelper.addAuthMessageToHistory(msg);

    user.getAuthenticationState()
            .setLastAuthRequestHistoryId(msg.getHistoryRef().getHistoryId());

    // Return the web session as extracted by the session management method
    return sessionManagementMethod.extractWebSession(msg);
}

@Override
public ApiResponse getApiResponseRepresentation() {
    Map<String, String> values = new HashMap<>();
    values.put("methodName", API_METHOD_NAME);
    values.put("scriptName", script.getName());
    values.putAll(paramValues);
    return new AuthMethodApiResponseRepresentation<>(values);
}

@Override
public void replaceUserDataInPollRequest(HttpMessage msg, User user) {
    AuthenticationHelper.replaceUserDataInRequest(
            msg, wrapKeys(this.paramValues), NULL_ENCODER);
}
}

public class ScriptBasedAuthenticationMethodOptionsPanel extends
AbstractAuthenticationMethodOptionsPanel {

    // Swing options panel for configuring a ScriptBasedAuthenticationMethod:
    // script selection combo, a Load button, and a dynamic parameter form.

    private static final long serialVersionUID = 7812841049435409987L;

    private final String SCRIPT_NAME_LABEL =
            Constant.messages.getString("authentication.method.script.field.label.scriptName");
    private final String LABEL_NOT_LOADED =
            Constant.messages.getString("authentication.method.script.field.label.notLoaded");

    private JComboBox<ScriptWrapper> scriptsComboBox;
    private JButton loadScriptButton;

    // The method instance currently being edited (bound via bindMethod()).
    private ScriptBasedAuthenticationMethod method;
    private AuthenticationIndicatorsPanel indicatorsPanel;

    // The script whose parameters are currently shown; null until a script is loaded.
    private ScriptWrapper loadedScript;

    private JPanel dynamicContentPanel;

    // Form generated from the loaded script's required/optional parameter names.
    private DynamicFieldsPanel dynamicFieldsPanel;
    private String[] loadedCredentialParams;

    public ScriptBasedAuthenticationMethodOptionsPanel() {
        super();
        initialize();
    }

    /** Builds the static part of the panel (label, combo box, Load button, content area). */
    @SuppressWarnings("unchecked")
    private void initialize() {
        this.setLayout(new GridBagLayout());

        this.add(new JLabel(SCRIPT_NAME_LABEL), LayoutHelper.getGBC(0, 0, 1, 0.0d, 0.0d));

        this.scriptsComboBox = new JComboBox<>();
        this.scriptsComboBox.setRenderer(new ScriptWrapperRenderer(this));
        this.add(this.scriptsComboBox, LayoutHelper.getGBC(1, 0, 1, 1.0d, 0.0d));

        this.loadScriptButton = new JButton("Load");
        this.add(this.loadScriptButton, LayoutHelper.getGBC(2, 0, 1, 0.0d, 0.0d));
        this.loadScriptButton.addActionListener(
                new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        // true: try to carry over previously entered parameter values.
                        loadScript((ScriptWrapper) scriptsComboBox.getSelectedItem(), true);
                    }
                });

        // Make sure the 'Load' button is disabled when nothing is selected
        this.loadScriptButton.setEnabled(false);
        this.scriptsComboBox.addActionListener(
                new ActionListener() {
                    @Override
                    public void actionPerformed(ActionEvent e) {
                        loadScriptButton.setEnabled(scriptsComboBox.getSelectedIndex() >= 0);
                    }
                });

        this.dynamicContentPanel = new JPanel(new BorderLayout());
        this.add(this.dynamicContentPanel, LayoutHelper.getGBC(0, 1, 3, 1.0d, 0.0d));
        this.dynamicContentPanel.add(new JLabel(LABEL_NOT_LOADED));
    }

    /** Fails if no script has been loaded yet, then validates the dynamic parameter form. */
    @Override
    public void validateFields() throws IllegalStateException {
        if (this.loadedScript == null) {
            this.scriptsComboBox.requestFocusInWindow();
            throw new IllegalStateException(
                    Constant.messages.getString(
                            "authentication.method.script.dialog.error.text.notLoadedNorConfigured"));
        }
        this.dynamicFieldsPanel.validateFields();
    }

    @Override
    public void saveMethod() {
        this.method.script = (ScriptWrapper) this.scriptsComboBox.getSelectedItem();
        // This method will also be called when switching panels to save a temporary state so
        // the state of the authentication method might not be valid
        if (this.dynamicFieldsPanel != null)
            this.method.paramValues = this.dynamicFieldsPanel.getFieldValues();
        else this.method.paramValues = Collections.emptyMap();
        if (this.loadedScript != null)
            this.method.credentialsParamNames = this.loadedCredentialParams;
    }

    @Override
    public void bindMethod(AuthenticationMethod method)
            throws UnsupportedAuthenticationMethodException {
        this.method = (ScriptBasedAuthenticationMethod) method;

        // Make sure the list of scripts is refreshed
        List<ScriptWrapper> scripts = getScriptsExtension().getScripts(SCRIPT_TYPE_AUTH);
        DefaultComboBoxModel<ScriptWrapper> model =
                new DefaultComboBoxModel<>(scripts.toArray(new ScriptWrapper[scripts.size()]));
        this.scriptsComboBox.setModel(model);
        this.scriptsComboBox.setSelectedItem(this.method.script);
        this.loadScriptButton.setEnabled(this.method.script != null);

        // Load the selected script, if any
        if (this.method.script != null) {
            // false: keep the method's persisted values rather than adapting panel values.
            loadScript(this.method.script, false);
            if (this.dynamicFieldsPanel != null)
                this.dynamicFieldsPanel.bindFieldValues(this.method.paramValues);
        }
    }

    @Override
    public void bindMethod(
            AuthenticationMethod method, AuthenticationIndicatorsPanel indicatorsPanel)
            throws UnsupportedAuthenticationMethodException {
        this.indicatorsPanel = indicatorsPanel;
        bindMethod(method);
    }

    @Override
    public AuthenticationMethod getMethod() {
        return this.method;
    }

    // Loads the given script into the panel, regenerating the dynamic parameter form.
    // NOTE(review): this may be reached with indicatorsPanel still null when the
    // single-argument bindMethod() is used directly — confirm against callers.
    private void loadScript(ScriptWrapper scriptW, boolean adaptOldValues) {
        AuthenticationScript script = getScriptInterfaceV2(scriptW);
        if (script == null) {
            script = getScriptInterface(scriptW);
        }
        if (script == null) {
            log.warn(
                    "The script "
                            + scriptW.getName()
                            + " does not properly implement the Authentication Script interface.");
            warnAndResetPanel(
                    Constant.messages.getString(
                            "authentication.method.script.dialog.error.text.interface",
                            scriptW.getName()));
            return;
        }

        try {
            if (script instanceof AuthenticationScriptV2) {
                // V2 scripts own the logged in/out indicators: show them read-only.
                AuthenticationScriptV2 scriptV2 = (AuthenticationScriptV2) script;
                String toolTip =
                        Constant.messages.getString(
                                "authentication.method.script.dialog.loggedInOutIndicatorsInScript.toolTip");
                String loggedInIndicator = scriptV2.getLoggedInIndicator();
                this.method.setLoggedInIndicatorPattern(loggedInIndicator);
                this.indicatorsPanel.setLoggedInIndicatorPattern(loggedInIndicator);
                this.indicatorsPanel.setLoggedInIndicatorEnabled(false);
                this.indicatorsPanel.setLoggedInIndicatorToolTip(toolTip);
                String loggedOutIndicator = scriptV2.getLoggedOutIndicator();
                this.method.setLoggedOutIndicatorPattern(loggedOutIndicator);
                this.indicatorsPanel.setLoggedOutIndicatorPattern(loggedOutIndicator);
                this.indicatorsPanel.setLoggedOutIndicatorEnabled(false);
                this.indicatorsPanel.setLoggedOutIndicatorToolTip(toolTip);
            } else {
                this.indicatorsPanel.setLoggedInIndicatorEnabled(true);
                this.indicatorsPanel.setLoggedInIndicatorToolTip(null);
                this.indicatorsPanel.setLoggedOutIndicatorEnabled(true);
                this.indicatorsPanel.setLoggedOutIndicatorToolTip(null);
            }
            String[] requiredParams = script.getRequiredParamsNames();
            String[] optionalParams = script.getOptionalParamsNames();
            this.loadedCredentialParams = script.getCredentialsParamsNames();
            if (log.isDebugEnabled()) {
                log.debug(
                        "Loaded authentication script - required parameters: "
                                + Arrays.toString(requiredParams)
                                + " - optional parameters: "
                                + Arrays.toString(optionalParams));
            }
            // If there's an already loaded script, make sure we save its values and _try_
            // to place them in the new panel
            Map<String, String> oldValues = null;
            if (adaptOldValues && dynamicFieldsPanel != null) {
                oldValues = dynamicFieldsPanel.getFieldValues();
                if (log.isDebugEnabled()) log.debug("Trying to adapt old values: " + oldValues);
            }

            this.dynamicFieldsPanel = new DynamicFieldsPanel(requiredParams, optionalParams);
            this.loadedScript = scriptW;
            if (adaptOldValues && oldValues != null)
                this.dynamicFieldsPanel.bindFieldValues(oldValues);

            this.dynamicContentPanel.removeAll();
            this.dynamicContentPanel.add(dynamicFieldsPanel, BorderLayout.CENTER);
            this.dynamicContentPanel.revalidate();

        } catch (Exception e) {
            getScriptsExtension().handleScriptException(scriptW, e);
            log.error("Error while calling authentication script", e);
            warnAndResetPanel(
                    Constant.messages.getString(
                            "authentication.method.script.dialog.error.text.loading",
                            ExceptionUtils.getRootCauseMessage(e)));
        }
    }

    // Shows an error dialog and returns the panel to its "no script loaded" state.
    private void warnAndResetPanel(String errorMessage) {
        JOptionPane.showMessageDialog(
                this,
                errorMessage,
                Constant.messages.getString("authentication.method.script.dialog.error.title"),
                JOptionPane.ERROR_MESSAGE);
        this.loadedScript = null;
        this.scriptsComboBox.setSelectedItem(null);
        this.dynamicFieldsPanel = null;
        this.dynamicContentPanel.removeAll();
        this.dynamicContentPanel.add(new JLabel(LABEL_NOT_LOADED), BorderLayout.CENTER);
        this.dynamicContentPanel.revalidate();
    }
}

/**
 * A renderer for properly displaying the name of a {@link ScriptWrapper} in a ComboBox and
 * putting emphasis on loaded script.
*/
private static class ScriptWrapperRenderer extends BasicComboBoxRenderer {
    private static final long serialVersionUID = 3654541772447187317L;
    private static final Border BORDER = new EmptyBorder(2, 3, 3, 3);
    private ScriptBasedAuthenticationMethodOptionsPanel panel;

    public ScriptWrapperRenderer(ScriptBasedAuthenticationMethodOptionsPanel panel) {
        super();
        this.panel = panel;
    }

    @Override
    @SuppressWarnings("rawtypes")
    public Component getListCellRendererComponent(
            JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {
        super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
        if (value != null) {
            setBorder(BORDER);
            ScriptWrapper item = (ScriptWrapper) value;
            // Render the currently loaded script in bold with a "(loaded)" suffix.
            if (panel.loadedScript == item)
                setText(
                        "<html><b>"
                                + StringEscapeUtils.unescapeHtml(item.getName())
                                + " (loaded)</b></html>");
            else setText(item.getName());
        }
        return this;
    }
}

/** Registers the "authentication" script type with the scripts extension, if present. */
@Override
public void hook(ExtensionHook extensionHook) {
    // Hook up the Script Type
    if (getScriptsExtension() != null) {
        log.debug("Registering Script...");
        getScriptsExtension()
                .registerScriptType(
                        new ScriptType(
                                SCRIPT_TYPE_AUTH,
                                "authentication.method.script.type",
                                SCRIPT_ICON_AUTH,
                                false,
                                new String[] {ScriptType.CAPABILITY_APPEND}));
    }
}

@Override
public ScriptBasedAuthenticationMethod createAuthenticationMethod(int contextId) {
    return new ScriptBasedAuthenticationMethod();
}

@Override
public String getName() {
    return METHOD_NAME;
}

@Override
public int getUniqueIdentifier() {
    return METHOD_IDENTIFIER;
}

@Override
public AbstractAuthenticationMethodOptionsPanel buildOptionsPanel(Context uiSharedContext) {
    return new ScriptBasedAuthenticationMethodOptionsPanel();
}

@Override
public boolean hasOptionsPanel() {
    return true;
}

@Override
public AbstractCredentialsOptionsPanel<? extends AuthenticationCredentials>
        buildCredentialsOptionsPanel(
                AuthenticationCredentials credentials, Context uiSharedContext) {
    return new GenericAuthenticationCredentialsOptionsPanel(
            (GenericAuthenticationCredentials) credentials);
}

@Override
public boolean hasCredentialsOptionsPanel() {
    return true;
}

@Override
public boolean isTypeForMethod(AuthenticationMethod method) {
    return (method instanceof ScriptBasedAuthenticationMethod);
}

/** Rebuilds a method from the script name and parameter values persisted with a session. */
@Override
public ScriptBasedAuthenticationMethod loadMethodFromSession(Session session, int contextId)
        throws DatabaseException {
    ScriptBasedAuthenticationMethod method = createAuthenticationMethod(contextId);

    // Load the script and make sure it still exists and still follows the required interface
    this.loadMethod(
            method,
            session.getContextDataStrings(contextId, RecordContext.TYPE_AUTH_METHOD_FIELD_1),
            session.getContextDataStrings(contextId, RecordContext.TYPE_AUTH_METHOD_FIELD_2));

    return method;
}

/**
 * Populates {@code method} from persisted data: {@code scripts} holds the script name (first
 * element used) and {@code paramValuesS} the encoded parameter map (first element used).
 * Problems are logged/reported but never thrown.
 */
public void loadMethod(
        ScriptBasedAuthenticationMethod method,
        List<String> scripts,
        List<String> paramValuesS) {

    // Load the script and make sure it still exists and still follows the required interface
    String scriptName = "";
    if (scripts != null && scripts.size() > 0) {
        scriptName = scripts.get(0);
        ScriptWrapper script = getScriptsExtension().getScript(scriptName);
        if (script == null) {
            log.error(
                    "Unable to find script while loading Script Based Authentication Method for name: "
                            + scriptName);
            if (View.isInitialised()) {
                View.getSingleton()
                        .showMessageDialog(
                                Constant.messages.getString(
                                        "authentication.method.script.load.errorScriptNotFound",
                                        scriptName));
            }
            return;
        }
        log.info("Loaded script:" + script.getName());
        method.script = script;

        // Check script interface and make sure we load the credentials parameter names
        AuthenticationScript s = getScriptInterfaceV2(script);
        if (s == null) {
            s = getScriptInterface(script);
        }
        if (s == null) {
            log.error(
                    "Unable to load Script Based Authentication method. The script "
                            + scriptName
                            + " does not properly implement the Authentication Script interface.");
            return;
        }

        try {
            if (s instanceof AuthenticationScriptV2) {
                AuthenticationScriptV2 sV2 = (AuthenticationScriptV2) s;
                method.setLoggedInIndicatorPattern(sV2.getLoggedInIndicator());
                method.setLoggedOutIndicatorPattern(sV2.getLoggedOutIndicator());
            }
            method.credentialsParamNames = s.getCredentialsParamsNames();
        } catch (Exception e) {
            getScriptsExtension().handleScriptException(script, e);
        }
    }

    // Load the parameter values
    Map<String, String> paramValues = null;
    if (paramValuesS != null && paramValuesS.size() > 0) {
        paramValues = EncodingUtils.stringToMap(paramValuesS.get(0));
        method.paramValues = paramValues;
    } else {
        method.paramValues = new HashMap<>();
        // NOTE(review): this logs an error even when no values were ever persisted —
        // possibly noisy for newly created methods; confirm intent.
        log.error(
                "Unable to load script parameter values loading Script Based Authentication Method for name: "
                        + scriptName);
    }
}

/** Persists the script name and encoded parameter map into the session's context data. */
@Override
public void persistMethodToSession(
        Session session, int contextId, AuthenticationMethod authMethod)
        throws UnsupportedAuthenticationMethodException, DatabaseException {
    if (!(authMethod instanceof ScriptBasedAuthenticationMethod))
        throw new UnsupportedAuthenticationMethodException(
                "Script based authentication type only supports: "
                        + ScriptBasedAuthenticationMethod.class);

    ScriptBasedAuthenticationMethod method = (ScriptBasedAuthenticationMethod) authMethod;
    session.setContextData(
            contextId, RecordContext.TYPE_AUTH_METHOD_FIELD_1, method.script.getName());
    session.setContextData(
            contextId,
            RecordContext.TYPE_AUTH_METHOD_FIELD_2,
            EncodingUtils.mapToString(method.paramValues));
}

@Override
public AuthenticationCredentials createAuthenticationCredentials() {
    // NOTE: This method will initialize a set of Credentials without any required parameters
    // and, thus, should be later modified explicitly (e.g. through calls to decode())
    return new GenericAuthenticationCredentials(new String[0]);
}

@Override
public Class<GenericAuthenticationCredentials> getAuthenticationCredentialsType() {
    return GenericAuthenticationCredentials.class;
}

// Lazily resolves the scripts extension; may return null if it is not installed.
private ExtensionScript getScriptsExtension() {
    if (extensionScript == null)
        extensionScript =
                Control.getSingleton().getExtensionLoader().getExtension(ExtensionScript.class);
    return extensionScript;
}

// Returns the script as the base AuthenticationScript interface, or null (reporting the
// failure to the scripts extension).
private AuthenticationScript getScriptInterface(ScriptWrapper script) {
    try {
        return getScriptsExtension().getInterface(script, AuthenticationScript.class);
    } catch (Exception e) {
        getScriptsExtension()
                .handleFailedScriptInterface(
                        script,
                        Constant.messages.getString(
                                "authentication.method.script.dialog.error.text.interface",
                                script.getName()));
    }
    return null;
}

// Returns the script as the V2 interface, or null if it does not (fully) implement it.
private AuthenticationScriptV2 getScriptInterfaceV2(ScriptWrapper script) {
    try {
        AuthenticationScriptV2 authScript =
                getScriptsExtension().getInterface(script, AuthenticationScriptV2.class);
        if (authScript == null) {
            log.debug(
                    "Script '{}' is not a AuthenticationScriptV2 interface.", script::getName);
            return null;
        }

        // Some ScriptEngines do not verify if all Interface Methods are contained in the
        // script.
        // So we must invoke them to ensure that they are defined in the loaded script!
        // Otherwise some ScriptEngines loads successfully AuthenticationScriptV2 without the
        // methods
        // getLoggedInIndicator() / getLoggedOutIndicator().
        // Though it should fallback to interface AuthenticationScript.
        authScript.getLoggedInIndicator();
        authScript.getLoggedOutIndicator();
        return authScript;
    } catch (Exception ignore) {
        // The interface is optional, the AuthenticationScript will be checked after this one.
        if (log.isDebugEnabled()) {
            log.debug(
                    "Script '" + script.getName() + "' is not a AuthenticationScriptV2 interface!",
                    ignore);
        }
    }
    return null;
}

// Wraps each parameter name in the token prefix/postfix markers used when replacing user
// data in poll requests.
private static Map<String, String> wrapKeys(Map<String, String> kvPairs) {
    Map<String, String> map = new HashMap<>();
    for (Entry<String, String> kv : kvPairs.entrySet()) {
        map.put(
                AuthenticationMethod.TOKEN_PREFIX
                        + kv.getKey()
                        + AuthenticationMethod.TOKEN_POSTFIX,
                kv.getValue());
    }
    return map;
}

/** The Interface that needs to be implemented by an Authentication Script. */
public interface AuthenticationScript {

    public String[] getRequiredParamsNames();

    public String[] getOptionalParamsNames();

    public String[] getCredentialsParamsNames();

    public HttpMessage authenticate(
            AuthenticationHelper helper,
            Map<String, String> paramsValues,
            GenericAuthenticationCredentials credentials)
            throws ScriptException;
}

/**
 * An {@code AuthenticationScript} that allows to specify the logged in/out indicators.
 *
 * @since 2.5.0
 */
public interface AuthenticationScriptV2 extends AuthenticationScript {

    /**
     * Gets the logged in indicator pattern.
     *
     * @return the logged in indicator pattern
     */
    String getLoggedInIndicator();

    /**
     * Gets the logged out indicator pattern.
     *
     * @return the logged out indicator pattern
     */
    String getLoggedOutIndicator();
}

/* API related constants and methods. */

private static final String PARAM_SCRIPT_NAME = "scriptName";
private static final String PARAM_SCRIPT_CONFIG_PARAMS = "scriptConfigParams";

/**
 * Builds the API action that configures this authentication method on a context: loads the
 * named script, verifies its interface, and collects required/optional parameter values from
 * the API request.
 */
@Override
public ApiDynamicActionImplementor getSetMethodForContextApiAction() {
    return new ApiDynamicActionImplementor(
            API_METHOD_NAME,
            new String[] {PARAM_SCRIPT_NAME},
            new String[] {PARAM_SCRIPT_CONFIG_PARAMS}) {

        @Override
        public void handleAction(JSONObject params) throws ApiException {
            Context context =
                    ApiUtils.getContextByParamId(params, AuthenticationAPI.PARAM_CONTEXT_ID);
            String scriptName = ApiUtils.getNonEmptyStringParam(params, PARAM_SCRIPT_NAME);

            // Prepare the method
            ScriptBasedAuthenticationMethod method =
                    createAuthenticationMethod(context.getId());

            // Load the script and make sure it exists and follows the required interface
            ScriptWrapper script = getScriptsExtension().getScript(scriptName);
            if (script == null) {
                log.error(
                        "Unable to find script while loading Script Based Authentication Method for name: "
                                + scriptName);
                throw new ApiException(ApiException.Type.SCRIPT_NOT_FOUND, scriptName);
            } else log.info("Loaded script for API:" + script.getName());
            method.script = script;

            // Check script interface and make sure we load the credentials parameter names
            AuthenticationScript s = getScriptInterfaceV2(script);
            if (s == null) {
                s = getScriptInterface(script);
            }
            if (s == null) {
                log.error(
                        "Unable to load Script Based Authentication method. The script "
                                + script.getName()
                                + " does not properly implement the Authentication Script interface.");
                throw new ApiException(
                        ApiException.Type.BAD_SCRIPT_FORMAT,
                        "Does not follow Authentication script interface");
            }
            try {
                if (s instanceof AuthenticationScriptV2) {
                    AuthenticationScriptV2 sV2 = (AuthenticationScriptV2) s;
                    method.setLoggedInIndicatorPattern(sV2.getLoggedInIndicator());
                    method.setLoggedOutIndicatorPattern(sV2.getLoggedOutIndicator());
                }
                method.credentialsParamNames = s.getCredentialsParamsNames();

                // Load config param names + values and make sure all of the required ones
                // are there
                String[] requiredParams = s.getRequiredParamsNames();
                String[] optionalParams = s.getOptionalParamsNames();
                if (log.isDebugEnabled()) {
                    log.debug(
                            "Loaded authentication script - required parameters: "
                                    + Arrays.toString(requiredParams)
                                    + " - optional parameters: "
                                    + Arrays.toString(optionalParams));
                }

                Map<String, String> paramValues = new HashMap<>();
                for (String rp : requiredParams) {
                    // If one of the required parameters is not present, it will throw
                    // an exception
                    String val = ApiUtils.getNonEmptyStringParam(params, rp);
                    paramValues.put(rp, val);
                }

                for (String op : optionalParams)
                    paramValues.put(op, ApiUtils.getOptionalStringParam(params, op));
                method.paramValues = paramValues;
                if (log.isDebugEnabled())
                    log.debug("Loaded authentication script parameters:" + paramValues);

            } catch (ApiException e) {
                throw e;
            } catch (Exception e) {
                getScriptsExtension().handleScriptException(script, e);
                log.error(
                        "Unable to load Script Based Authentication method. The script "
                                + script.getName()
                                + " contains errors.");
                throw new ApiException(ApiException.Type.BAD_SCRIPT_FORMAT, e.getMessage());
            }

            context.setAuthenticationMethod(method);
        }
    };
}

@Override
public ApiDynamicActionImplementor getSetCredentialsForUserApiAction() {
    return GenericAuthenticationCredentials.getSetCredentialsForUserApiAction(this);
}

/** Writes the script name and encoded parameter map into a context configuration. */
@Override
public void exportData(Configuration config, AuthenticationMethod authMethod) {
    if (!(authMethod instanceof ScriptBasedAuthenticationMethod)) {
        throw new UnsupportedAuthenticationMethodException(
                "Script based authentication type only supports: "
                        + ScriptBasedAuthenticationMethod.class.getName());
    }
    ScriptBasedAuthenticationMethod method = (ScriptBasedAuthenticationMethod) authMethod;
    config.setProperty(CONTEXT_CONFIG_AUTH_SCRIPT_NAME, method.script.getName());
    config.setProperty(
            CONTEXT_CONFIG_AUTH_SCRIPT_PARAMS, EncodingUtils.mapToString(method.paramValues));
}

/** Restores the script name and parameter map from a context configuration. */
@Override
public void importData(Configuration config, AuthenticationMethod authMethod)
        throws ConfigurationException {
    if (!(authMethod instanceof ScriptBasedAuthenticationMethod)) {
        throw new UnsupportedAuthenticationMethodException(
                "Script based authentication type only supports: "
                        + ScriptBasedAuthenticationMethod.class.getName());
    }
    ScriptBasedAuthenticationMethod method = (ScriptBasedAuthenticationMethod) authMethod;
    this.loadMethod(
            method,
            objListToStrList(config.getList(CONTEXT_CONFIG_AUTH_SCRIPT_NAME)),
            objListToStrList(config.getList(CONTEXT_CONFIG_AUTH_SCRIPT_PARAMS)));
}

// Converts a raw Configuration list into a list of their string representations.
private List<String> objListToStrList(List<Object> oList) {
    List<String> sList = new ArrayList<>(oList.size());
    for (Object o : oList) {
        sList.add(o.toString());
    }
    return sList;
}
}
/*L * Copyright Washington University in St. Louis * Copyright SemanticBits * Copyright Persistent Systems * Copyright Krishagni * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/catissue-core/LICENSE.txt for details. */ /** * <p>Title: ReportedProblemForm Class> * <p>Description: ReportedProblemForm Class is used to encapsulate all the request parameters passed * from ReportProblem webpage.</p> * Copyright: Copyright (c) year * Company: Washington University, School of Medicine, St. Louis. * @author Gautam Shetty * @version 1.00 * Created on Apr 11, 2005 */ package edu.wustl.catissuecore.actionForm; import javax.servlet.http.HttpServletRequest; import org.apache.struts.action.ActionError; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionMapping; import edu.wustl.catissuecore.domain.ReportedProblem; import edu.wustl.catissuecore.util.global.Constants; import edu.wustl.common.actionForm.AbstractActionForm; import edu.wustl.common.domain.AbstractDomainObject; import edu.wustl.common.util.global.ApplicationProperties; import edu.wustl.common.util.global.Status; import edu.wustl.common.util.global.Validator; import edu.wustl.common.util.logger.Logger; /** * ReportedProblemForm Class is used to encapsulate all the request parameters passed * from ReportProblem webpage. * @author gautam_shetty */ public class ReportedProblemForm extends AbstractActionForm { private static final long serialVersionUID = 1L; /** * logger Logger - Generic logger. */ private static Logger logger = Logger.getCommonLogger(ReportedProblemForm.class); /** * The subject of the reported problem. */ private String subject; /** * The email id of who reported the problem. */ private String from; /** * The message body of the reported problem. */ private String messageBody; private String comments; private String nameOfReporter; private String affiliation; /** * The affiliation of the user with the reported problem. 
* @return The affiliation of the reported problem. * @see #setAffiliation(String affiliation) */ public String getAffiliation() { return this.affiliation; } /** * @param affiliation The affiliation to set. */ public void setAffiliation(String affiliation) { this.affiliation = affiliation; } /** * The name of the user who reported the problem. * @return The name of the user who reported the problem. * @see #setNameOfReporter(String nameOfReporter) */ public String getNameOfReporter() { return this.nameOfReporter; } /** * @param nameOfReporter The nameOfReporter to set. */ public void setNameOfReporter(String nameOfReporter) { this.nameOfReporter = nameOfReporter; } /** * Initializes an empty problem. */ public ReportedProblemForm() { this.setActivityStatus(Status.ACTIVITY_STATUS_PENDING.toString()); this.clear(); } private void clear() { this.reset(); } /** * Resets all the fields. */ @Override protected void reset() { this.from = null; this.subject = null; this.messageBody = null; this.nameOfReporter = null; this.affiliation = null; } /** * Returns the email id of who reported the problem. * @return the email id of who reported the problem. * @see #setFrom(String) */ public String getFrom() { return this.from; } /** * Sets the email id of who reported the problem. * @param from the email id of who reported the problem. * @see #getFrom() */ public void setFrom(String from) { this.from = from; } /** * The message body of the reported problem. * @return The message body of the reported problem. * @see #setMessageBody(String) */ public String getMessageBody() { return this.messageBody; } /** * Sets the message body of the reported problem. * @param messageBody he message body of the reported problem. * @see #getMessageBody() */ public void setMessageBody(String messageBody) { this.messageBody = messageBody; } /** * Returns the subject of the reported problem. * @return the subject of the reported problem. 
* @see #setSubject(String) */ public String getSubject() { return this.subject; } /** * Sets the subject of the reported problem. * @param subject The subject to set. * @see #getSubject() */ public void setSubject(String subject) { this.subject = subject; } /** * @return Returns the comments. */ public String getComments() { return this.comments; } /** * @param comments The comments to set. */ public void setComments(String comments) { this.comments = comments; } /** * Returns the form id. * @return the form id. * @see AbstractActionForm#getFormId() */ @Override public int getFormId() { return Constants.REPORTED_PROBLEM_FORM_ID; } /** *@param abstractDomain An AbstractDomain Object * @see edu.wustl.catissuecore.actionForm.AbstractActionForm#setAllValues(edu.wustl.catissuecore.domain.AbstractDomain) */ public void setAllValues(AbstractDomainObject abstractDomain) { final ReportedProblem reportedProblem = (ReportedProblem) abstractDomain; this.from = reportedProblem.getFrom(); this.subject = reportedProblem.getSubject(); this.messageBody = reportedProblem.getMessageBody(); this.comments = reportedProblem.getComments(); this.setActivityStatus(reportedProblem.getActivityStatus()); this.affiliation = reportedProblem.getAffiliation(); this.nameOfReporter = reportedProblem.getNameOfReporter(); } /** * Overrides the validate method of ActionForm. 
* @return error ActionErrors instance * @param mapping Actionmapping instance * @param request HttpServletRequest instance */ @Override public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) { final ActionErrors errors = new ActionErrors(); final Validator validator = new Validator(); try { if (this.getOperation() != null) { if (this.getOperation().equals(Constants.ADD)) { if (Validator.isEmpty(this.from)) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.item.required", ApplicationProperties .getValue("fields.from"))); } else { if (!validator.isValidEmailAddress(this.from)) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.item.format", ApplicationProperties .getValue("user.emailAddress"))); } } // Mandar 10-apr-06 : bugid :353 // Error messages should be in the same sequence as the sequence of fields on the page. if (Validator.isEmpty(this.nameOfReporter)) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.item.required", ApplicationProperties .getValue("fields.nameofreporter"))); } if (Validator.isEmpty(this.affiliation)) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.item.required", ApplicationProperties .getValue("fields.affiliation"))); } if (Validator.isEmpty(this.subject)) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.item.required", ApplicationProperties .getValue("fields.title"))); } if (Validator.isEmpty(this.messageBody)) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.item.required", ApplicationProperties .getValue("fields.message"))); } //to fix bug:1678 if (this.messageBody == null || this.messageBody.trim().length() >= Constants.MESSAGE_LENGTH) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "reportedProblem.error.message", ApplicationProperties .getValue("fields.message"), Integer .valueOf(Constants.MESSAGE_LENGTH))); } } if (this.getOperation().equals(Constants.EDIT)) { if 
(this.getActivityStatus().trim().equals(Constants.SELECT_OPTION)) { errors.add(ActionErrors.GLOBAL_ERROR, new ActionError( "errors.item.required", ApplicationProperties .getValue("reportProblem.status"))); } } } } catch (final Exception excp) { ReportedProblemForm.logger.error(excp.getMessage(), excp); excp.printStackTrace(); } return errors; } @Override public void setAddNewObjectIdentifier(String arg0, Long arg1) { // TODO Auto-generated method stub } }
/* * Copyright 2017-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.netconf.ctl.impl; import org.apache.commons.lang3.tuple.Pair; import org.apache.sshd.common.NamedFactory; import org.apache.sshd.common.util.threads.ThreadUtils; import org.apache.sshd.server.Command; import org.apache.sshd.server.Environment; import org.apache.sshd.server.ExitCallback; import org.apache.sshd.server.SessionAware; import org.apache.sshd.server.session.ServerSession; import org.onosproject.netconf.DatastoreId; import org.onosproject.netconf.ctl.impl.NetconfStreamThread.NetconfMessageState; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedReader; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintWriter; import java.io.UnsupportedEncodingException; import java.nio.Buffer; import java.nio.ByteBuffer; import java.util.Collection; import java.util.Optional; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.regex.Pattern; /** * Mocks a NETCONF Device to test the NETCONF Southbound Interface etc. * * Implements the 'netconf' subsystem on Apache SSH (Mina). 
* See SftpSubsystem for an example of another subsystem */ public class NetconfSshdTestSubsystem extends Thread implements Command, Runnable, SessionAware { protected final Logger log = LoggerFactory.getLogger(getClass()); public static class Factory implements NamedFactory<Command> { public static final String NAME = "netconf"; private final ExecutorService executors; private final boolean shutdownExecutor; public Factory() { this(null); } /** * @param executorService The {@link ExecutorService} to be used by * the {@link SftpSubsystem} command when starting execution. If * {@code null} then a single-threaded ad-hoc service is used. * <B>Note:</B> the service will <U>not</U> be shutdown when the * subsystem is closed - unless it is the ad-hoc service, which will be * shutdown regardless * @see #Factory(ExecutorService, boolean) */ public Factory(ExecutorService executorService) { this(executorService, false); } /** * @param executorService The {@link ExecutorService} to be used by * the {@link SftpSubsystem} command when starting execution. If * {@code null} then a single-threaded ad-hoc service is used. * @param shutdownOnExit If {@code true} the {@link ExecutorService#shutdownNow()} * will be called when subsystem terminates - unless it is the ad-hoc * service, which will be shutdown regardless */ public Factory(ExecutorService executorService, boolean shutdownOnExit) { executors = executorService; shutdownExecutor = shutdownOnExit; } public ExecutorService getExecutorService() { return executors; } public boolean isShutdownOnExit() { return shutdownExecutor; } @Override public Command create() { return new NetconfSshdTestSubsystem(getExecutorService(), isShutdownOnExit()); } @Override public String getName() { return NAME; } } /** * Properties key for the maximum of available open handles per session. 
*/ private static final String CLOSE_SESSION = "<close-session"; private static final String END_PATTERN = "]]>]]>"; private static final String HASH = "#"; private static final String LF = "\n"; private static final String MSGLEN_REGEX_PATTERN = "\n#\\d+\n"; private static final String MSGLEN_PART_REGEX_PATTERN = "\\d+\n"; private static final String CHUNKED_END_REGEX_PATTERN = "\n##\n"; private ExecutorService executors; private boolean shutdownExecutor; private ExitCallback callback; private ServerSession session; private InputStream in; private OutputStream out; private OutputStream err; private Environment env; private Future<?> pendingFuture; private boolean closed = false; private NetconfMessageState state; private PrintWriter outputStream; private static final String SAMPLE_REQUEST = "<some-yang-element xmlns=\"some-namespace\">" + "<some-child-element/>" + "</some-yang-element>"; public static final Pattern GET_REQ_PATTERN = Pattern.compile("(<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>)\\R?" + "(<rpc message-id=\")[0-9]*(\" xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">)\\R?" + "(<get>)\\R?" + "(<filter type=\"subtree\">).*(</filter>)\\R?" + "(</get>)\\R?(</rpc>)\\R?", Pattern.DOTALL); public static final Pattern GET_CONFIG_REQ_PATTERN = Pattern.compile("(<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>)\\R?" + "(<rpc message-id=\")[0-9]*(\" xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">)\\R?" + "(<get-config>)\\R?" + "(<source>)\\R?((<" + DatastoreId.CANDIDATE.toString() + "/>)|(<" + DatastoreId.RUNNING.toString() + "/>)|(<" + DatastoreId.STARTUP.toString() + "/>))\\R?(</source>)\\R?" + "(<filter type=\"subtree\">).*(</filter>)\\R?" + "(</get-config>)\\R?(</rpc>)\\R?", Pattern.DOTALL); public static final Pattern COPY_CONFIG_REQ_PATTERN = Pattern.compile("(<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>)\\R?" + "(<rpc xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" message-id=\")[0-9]*(\">)\\R?" + "(<copy-config>)\\R?" + "(<target>\\R?" 
+ "(" + "(<" + DatastoreId.CANDIDATE.toString() + "/>)|" + "(<" + DatastoreId.RUNNING.toString() + "/>)|" + "(<" + DatastoreId.STARTUP.toString() + "/>)" + ")\\R?" + "</target>)\\R?" + "(<source>)\\R?" + "(" + "(<config>)(.*)(</config>)|" + "(<" + DatastoreId.CANDIDATE.toString() + "/>)|" + "(<" + DatastoreId.RUNNING.toString() + "/>)|" + "(<" + DatastoreId.STARTUP.toString() + "/>)" + ")\\R?" + "(</source>)\\R?" + "(</copy-config>)\\R?(</rpc>)\\R?", Pattern.DOTALL); public static final Pattern UNLOCK_REQ_PATTERN = Pattern.compile("(<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>)\\R?" + "(<rpc xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" " + "message-id=\")[0-9]*(\">)\\R?" + "(<unlock>)\\R?" + "(<target>\\R?((<" + DatastoreId.CANDIDATE.toString() + "/>)|" + "(<" + DatastoreId.RUNNING.toString() + "/>)|" + "(<" + DatastoreId.STARTUP.toString() + "/>))\\R?</target>)\\R?" + "(</unlock>)\\R?(</rpc>)\\R?", Pattern.DOTALL); public static final Pattern LOCK_REQ_PATTERN = Pattern.compile("(<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>)\\R?" + "(<rpc xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" " + "message-id=\")[0-9]*(\">)\\R?" + "(<lock>)\\R?" + "(<target>\\R?((<" + DatastoreId.CANDIDATE.toString() + "/>)|" + "(<" + DatastoreId.RUNNING.toString() + "/>)|" + "(<" + DatastoreId.STARTUP.toString() + "/>))\\R?</target>)\\R?" + "(</lock>)\\R?(</rpc>)\\R?", Pattern.DOTALL); public static final Pattern EDIT_CONFIG_REQ_PATTERN = Pattern.compile("(<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>)\\R?" + "(<rpc message-id=\")[0-9]*(\") *(xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">)\\R?" + "(<edit-config>)\\R?" + "(<target>\\R?((<" + DatastoreId.CANDIDATE.toString() + "/>)|" + "(<" + DatastoreId.RUNNING.toString() + "/>)|" + "(<" + DatastoreId.STARTUP.toString() + "/>))\\R?</target>)\\R?" + "(<config xmlns:nc=\"urn:ietf:params:xml:ns:netconf:base:1.0\">)\\R?" 
+ ".*" + "(</config>)\\R?(</edit-config>)\\R?(</rpc>)\\R?", Pattern.DOTALL); public static final Pattern HELLO_REQ_PATTERN_1_1 = Pattern.compile("(<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>)\\R?" + "(<hello xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">)\\R?" + "( *)(<capabilities>)\\R?" + "( *)(<capability>urn:ietf:params:netconf:base:1.0</capability>)\\R?" + "( *)(<capability>urn:ietf:params:netconf:base:1.1</capability>)\\R?" + "( *)(</capabilities>)\\R?" + "(</hello>)\\R? *", Pattern.DOTALL); public static final Pattern HELLO_REQ_PATTERN = Pattern.compile("(<\\?xml).*" + "(<hello xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">)\\R?" + "( *)(<capabilities>)\\R?" + "( *)(<capability>urn:ietf:params:netconf:base:1.0</capability>)\\R?" + "( *)(</capabilities>)\\R?" + "(</hello>)\\R? *", Pattern.DOTALL); public NetconfSshdTestSubsystem() { this(null); } /** * @param executorService The {@link ExecutorService} to be used by * the {@link SftpSubsystem} command when starting execution. If * {@code null} then a single-threaded ad-hoc service is used. * <b>Note:</b> the service will <U>not</U> be shutdown when the * subsystem is closed - unless it is the ad-hoc service * @see #SftpSubsystem(ExecutorService, boolean) */ public NetconfSshdTestSubsystem(ExecutorService executorService) { this(executorService, false); } /** * @param executorService The {@link ExecutorService} to be used by * the {@link SftpSubsystem} command when starting execution. If * {@code null} then a single-threaded ad-hoc service is used. 
* @param shutdownOnExit If {@code true} the {@link ExecutorService#shutdownNow()} * will be called when subsystem terminates - unless it is the ad-hoc * service, which will be shutdown regardless * @see ThreadUtils#newSingleThreadExecutor(String) */ public NetconfSshdTestSubsystem(ExecutorService executorService, boolean shutdownOnExit) { executors = executorService; if (executorService == null) { executors = ThreadUtils.newSingleThreadExecutor(getClass().getSimpleName()); shutdownExecutor = true; // we always close the ad-hoc executor service } else { shutdownExecutor = shutdownOnExit; } } @Override public void setSession(ServerSession session) { this.session = session; } @Override public void run() { BufferedReader bufferReader = new BufferedReader(new InputStreamReader(in)); boolean socketClosed = false; try { StringBuilder deviceRequestBuilder = new StringBuilder(); while (!socketClosed) { int cInt = bufferReader.read(); if (cInt == -1) { log.info("Netconf client sent error"); socketClosed = true; } char c = (char) cInt; state = state.evaluateChar(c); deviceRequestBuilder.append(c); if (state == NetconfMessageState.END_PATTERN) { String deviceRequest = deviceRequestBuilder.toString(); if (deviceRequest.equals(END_PATTERN)) { socketClosed = true; this.interrupt(); } else { deviceRequest = deviceRequest.replace(END_PATTERN, ""); Optional<Integer> messageId = NetconfStreamThread.getMsgId(deviceRequest); log.info("Client Request on session {}. 
MsgId {}: {}", session.getSessionId(), messageId, deviceRequest); synchronized (outputStream) { if (HELLO_REQ_PATTERN.matcher(deviceRequest).matches()) { String helloReply = getTestHelloReply(Optional.of(ByteBuffer.wrap( session.getSessionId()).asLongBuffer().get()), false); outputStream.write(helloReply + END_PATTERN); outputStream.flush(); } else if (HELLO_REQ_PATTERN_1_1.matcher(deviceRequest).matches()) { String helloReply = getTestHelloReply(Optional.of(ByteBuffer.wrap( session.getSessionId()).asLongBuffer().get()), true); outputStream.write(helloReply + END_PATTERN); outputStream.flush(); } else { Pair<String, Boolean> replyClosedPair = dealWithRequest(deviceRequest, messageId); String reply = replyClosedPair.getLeft(); if (reply != null) { Boolean newSockedClosed = replyClosedPair.getRight(); socketClosed = newSockedClosed.booleanValue(); outputStream.write(reply + END_PATTERN); outputStream.flush(); } } } deviceRequestBuilder.setLength(0); } } else if (state == NetconfMessageState.END_CHUNKED_PATTERN) { String deviceRequest = deviceRequestBuilder.toString(); if (!validateChunkedFraming(deviceRequest)) { log.error("Netconf client send badly framed message {}", deviceRequest); } else { deviceRequest = deviceRequest.replaceAll(MSGLEN_REGEX_PATTERN, ""); deviceRequest = deviceRequest.replaceAll(CHUNKED_END_REGEX_PATTERN, ""); Optional<Integer> messageId = NetconfStreamThread.getMsgId(deviceRequest); log.info("Client Request on session {}. 
MsgId {}: {}", session.getSessionId(), messageId, deviceRequest); synchronized (outputStream) { if (HELLO_REQ_PATTERN.matcher(deviceRequest).matches()) { String helloReply = getTestHelloReply(Optional.of(ByteBuffer.wrap( session.getSessionId()).asLongBuffer().get()), true); outputStream.write(helloReply + END_PATTERN); outputStream.flush(); } else { Pair<String, Boolean> replyClosedPair = dealWithRequest(deviceRequest, messageId); String reply = replyClosedPair.getLeft(); if (reply != null) { Boolean newSockedClosed = replyClosedPair.getRight(); socketClosed = newSockedClosed.booleanValue(); outputStream.write(formatChunkedMessage(reply)); outputStream.flush(); } } } } deviceRequestBuilder.setLength(0); } } } catch (Throwable t) { if (!socketClosed && !(t instanceof EOFException)) { // Ignore log.error("Exception caught in NETCONF Server subsystem", t.getMessage()); } } finally { try { bufferReader.close(); } catch (IOException ioe) { log.error("Could not close DataInputStream", ioe); } callback.onExit(0); } } private boolean validateChunkedFraming(String reply) { String[] strs = reply.split(LF + HASH); int strIndex = 0; while (strIndex < strs.length) { String str = strs[strIndex]; if ((str.equals(HASH + LF))) { return true; } if (!str.equals("")) { try { if (str.equals(LF)) { return false; } int len = Integer.parseInt(str.split(LF)[0]); if (str.split(MSGLEN_PART_REGEX_PATTERN)[1].getBytes("UTF-8").length != len) { return false; } } catch (NumberFormatException e) { return false; } catch (UnsupportedEncodingException e) { e.printStackTrace(); } } strIndex++; } return true; } private Pair<String, Boolean> dealWithRequest(String deviceRequest, Optional<Integer> messageId) { if (EDIT_CONFIG_REQ_PATTERN.matcher(deviceRequest).matches() || COPY_CONFIG_REQ_PATTERN.matcher(deviceRequest).matches() || LOCK_REQ_PATTERN.matcher(deviceRequest).matches() || UNLOCK_REQ_PATTERN.matcher(deviceRequest).matches()) { return Pair.of(getOkReply(messageId), false); } else if 
(GET_CONFIG_REQ_PATTERN.matcher(deviceRequest).matches() || GET_REQ_PATTERN.matcher(deviceRequest).matches()) { return Pair.of(getGetReply(messageId), false); } else if (deviceRequest.contains(CLOSE_SESSION)) { return Pair.of(getOkReply(messageId), true); } else { log.error("Unexpected NETCONF message structure on session {} : {}", ByteBuffer.wrap( session.getSessionId()).asLongBuffer().get(), deviceRequest); return null; } } private String formatChunkedMessage(String message) { if (message.endsWith(END_PATTERN)) { message = message.split(END_PATTERN)[0]; } if (!message.startsWith(LF + HASH)) { try { message = LF + HASH + message.getBytes("UTF-8").length + LF + message + LF + HASH + HASH + LF; } catch (UnsupportedEncodingException e) { e.printStackTrace(); } } return message; } @Override public void setInputStream(InputStream in) { this.in = in; } @Override public void setOutputStream(OutputStream out) { this.out = out; } @Override public void setErrorStream(OutputStream err) { this.err = err; } @Override public void setExitCallback(ExitCallback callback) { this.callback = callback; } @Override public void start(Environment env) throws IOException { this.env = env; state = NetconfMessageState.NO_MATCHING_PATTERN; outputStream = new PrintWriter(out, false); try { pendingFuture = executors.submit(this); } catch (RuntimeException e) { // e.g., RejectedExecutionException log.error("Failed (" + e.getClass().getSimpleName() + ") to start command: " + e.getMessage(), e); throw new IOException(e); } } @Override public void interrupt() { destroy(); } @Override public void destroy() { // if thread has not completed, cancel it if ((pendingFuture != null) && (!pendingFuture.isDone())) { boolean result = pendingFuture.cancel(true); // TODO consider waiting some reasonable (?) 
amount of time for cancellation if (log.isDebugEnabled()) { log.debug("destroy() - cancel pending future=" + result); } } pendingFuture = null; if ((executors != null) && shutdownExecutor) { Collection<Runnable> runners = executors.shutdownNow(); if (log.isDebugEnabled()) { log.debug("destroy() - shutdown executor service - runners count=" + runners.size()); } } executors = null; if (!closed) { if (log.isDebugEnabled()) { log.debug("destroy() - mark as closed"); } closed = true; } outputStream.close(); } protected void process(Buffer buffer) throws IOException { log.warn("Received buffer:" + buffer); } public static String getTestHelloReply(Collection<String> capabilities, Optional<Long> sessionId) { StringBuilder sb = new StringBuilder(); sb.append("<hello xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\">"); sb.append("<capabilities>"); capabilities.forEach(capability -> { sb.append("<capability>").append(capability).append("</capability>"); }); sb.append("</capabilities>"); if (sessionId.isPresent()) { sb.append("<session-id>"); sb.append(sessionId.get().toString()); sb.append("</session-id>"); } sb.append("</hello>"); return sb.toString(); } public static String getTestHelloReply(Optional<Long> sessionId, boolean useChunkedFraming) { if (useChunkedFraming) { return getTestHelloReply(NetconfSessionMinaImplTest.DEFAULT_CAPABILITIES_1_1, sessionId); } else { return getTestHelloReply(NetconfSessionMinaImplTest.DEFAULT_CAPABILITIES, sessionId); } } public static String getGetReply(Optional<Integer> messageId) { StringBuilder sb = new StringBuilder("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"); sb.append("<rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" "); if (messageId.isPresent()) { sb.append("message-id=\""); sb.append(String.valueOf(messageId.get())); sb.append("\">"); } sb.append("<data>\n"); sb.append(SAMPLE_REQUEST); sb.append("</data>\n"); sb.append("</rpc-reply>"); return sb.toString(); } public static String getOkReply(Optional<Integer> 
messageId) { StringBuilder sb = new StringBuilder("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"); sb.append("<rpc-reply xmlns=\"urn:ietf:params:xml:ns:netconf:base:1.0\" "); if (messageId.isPresent()) { sb.append("message-id=\""); sb.append(String.valueOf(messageId.get())); sb.append("\">"); } sb.append("<ok/>"); sb.append("</rpc-reply>"); return sb.toString(); } }
package se.ifkgoteborg.stat.model; import javax.persistence.Entity; import javax.persistence.GeneratedValue; import javax.persistence.Id; import javax.persistence.Table; import javax.persistence.Transient; import java.io.Serializable; /** * Keeps track of statistics for a single game in a structured manner * @author Erik * */ @Entity @Table(name="game_stats") public class GameStatistics implements Serializable { @Id @GeneratedValue private Long id; private Integer shotsOnGoalHomeTeam = null; private Integer shotsOnGoalAwayTeam = null; private Integer shotsOffGoalHomeTeam = null; private Integer shotsOffGoalAwayTeam = null; private Integer offsidesHomeTeam = null; private Integer offsidesAwayTeam = null; private Integer cornersHomeTeam = null; private Integer cornersAwayTeam = null; private Integer freekicksHomeTeam = null; private Integer freekicksAwayTeam = null; private Integer throwinsHomeTeam = null; private Integer throwinsAwayTeam = null; private Integer possessionHomeTeam = 50; private Integer possessionAwayTeam = 50; public Long getId() { return id; } public void setId(Long id) { this.id = id; } @Transient public String getShots() { return getShotsHomeTeam() + "-" + getShotsAwayTeam(); } @Transient public String getShotsOnGoal() { return getShotsOnGoalHomeTeam() + "-" + getShotsOnGoalAwayTeam(); } @Transient public String getShotsOffGoal() { return getShotsOffGoalHomeTeam() + "-" + getShotsOffGoalAwayTeam(); } @Transient public String getOffsides() { return getOffsidesHomeTeam() + "-" + getOffsidesAwayTeam(); } @Transient public String getThrowins() { return getThrowinsHomeTeam() + "-" + getThrowinsAwayTeam(); } @Transient public String getFreekicks() { return getFreekicksHomeTeam() + "-" + getFreekicksAwayTeam(); } @Transient public String getPossession() { return getPossessionHomeTeam() + "-" + getPossessionAwayTeam(); } @Transient public String getCorners() { return getCornersHomeTeam() + "-" + getCornersAwayTeam(); } @Transient public Integer 
getShotsHomeTeam() { return shotsOnGoalHomeTeam + shotsOffGoalHomeTeam; } @Transient public Integer getShotsAwayTeam() { return shotsOnGoalAwayTeam + shotsOffGoalAwayTeam; } public Integer getShotsOnGoalHomeTeam() { return shotsOnGoalHomeTeam; } public void setShotsOnGoalHomeTeam(Integer shotsOnGoalHomeTeam) { this.shotsOnGoalHomeTeam = shotsOnGoalHomeTeam; } public Integer getShotsOnGoalAwayTeam() { return shotsOnGoalAwayTeam; } public void setShotsOnGoalAwayTeam(Integer shotsOnGoalAwayTeam) { this.shotsOnGoalAwayTeam = shotsOnGoalAwayTeam; } public Integer getShotsOffGoalHomeTeam() { return shotsOffGoalHomeTeam; } public void setShotsOffGoalHomeTeam(Integer shotsOffGoalHomeTeam) { this.shotsOffGoalHomeTeam = shotsOffGoalHomeTeam; } public Integer getShotsOffGoalAwayTeam() { return shotsOffGoalAwayTeam; } public void setShotsOffGoalAwayTeam(Integer shotsOffGoalAwayTeam) { this.shotsOffGoalAwayTeam = shotsOffGoalAwayTeam; } public Integer getOffsidesHomeTeam() { return offsidesHomeTeam; } public void setOffsidesHomeTeam(Integer offsidesHomeTeam) { this.offsidesHomeTeam = offsidesHomeTeam; } public Integer getOffsidesAwayTeam() { return offsidesAwayTeam; } public void setOffsidesAwayTeam(Integer offsidesAwayTeam) { this.offsidesAwayTeam = offsidesAwayTeam; } public Integer getCornersHomeTeam() { return cornersHomeTeam; } public void setCornersHomeTeam(Integer cornersHomeTeam) { this.cornersHomeTeam = cornersHomeTeam; } public Integer getCornersAwayTeam() { return cornersAwayTeam; } public void setCornersAwayTeam(Integer cornersAwayTeam) { this.cornersAwayTeam = cornersAwayTeam; } public Integer getFreekicksHomeTeam() { return freekicksHomeTeam; } public void setFreekicksHomeTeam(Integer freekicksHomeTeam) { this.freekicksHomeTeam = freekicksHomeTeam; } public Integer getFreekicksAwayTeam() { return freekicksAwayTeam; } public void setFreekicksAwayTeam(Integer freekicksAwayTeam) { this.freekicksAwayTeam = freekicksAwayTeam; } public Integer getThrowinsHomeTeam() { 
return throwinsHomeTeam; } public void setThrowinsHomeTeam(Integer throwinsHomeTeam) { this.throwinsHomeTeam = throwinsHomeTeam; } public Integer getThrowinsAwayTeam() { return throwinsAwayTeam; } public void setThrowinsAwayTeam(Integer throwinsAwayTeam) { this.throwinsAwayTeam = throwinsAwayTeam; } public Integer getPossessionHomeTeam() { return possessionHomeTeam; } public void setPossessionHomeTeam(Integer possessionHomeTeam) { this.possessionHomeTeam = possessionHomeTeam; } public Integer getPossessionAwayTeam() { return possessionAwayTeam; } public void setPossessionAwayTeam(Integer possessionAwayTeam) { this.possessionAwayTeam = possessionAwayTeam; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.commitlog; import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.Future; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import com.google.common.collect.Iterables; import com.google.common.util.concurrent.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.config.Schema; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.Keyspace; import org.apache.cassandra.db.Mutation; import org.apache.cassandra.io.util.FileUtils; import org.apache.cassandra.utils.Pair; import org.apache.cassandra.utils.concurrent.WaitQueue; import org.apache.cassandra.utils.JVMStabilityInspector; import org.apache.cassandra.utils.WrappedRunnable; import static 
org.apache.cassandra.db.commitlog.CommitLogSegment.Allocation;

/**
 * Performs eager-creation of commit log segments in a background thread. All the
 * public methods are thread safe.
 */
public class CommitLogSegmentManager
{
    static final Logger logger = LoggerFactory.getLogger(CommitLogSegmentManager.class);

    /**
     * Queue of work to be done by the manager thread, also used to wake the thread to perform segment allocation.
     */
    private final BlockingQueue<Runnable> segmentManagementTasks = new LinkedBlockingQueue<>();

    /** Segments that are ready to be used. Head of the queue is the one we allocate writes to */
    private final ConcurrentLinkedQueue<CommitLogSegment> availableSegments = new ConcurrentLinkedQueue<>();

    /** Active segments, containing unflushed data */
    private final ConcurrentLinkedQueue<CommitLogSegment> activeSegments = new ConcurrentLinkedQueue<>();

    /** The segment we are currently allocating commit log records to */
    private volatile CommitLogSegment allocatingFrom = null;

    /** Signalled whenever a fresh segment is published to {@code availableSegments}; allocators block on it. */
    private final WaitQueue hasAvailableSegments = new WaitQueue();

    /**
     * Tracks commitlog size, in multiples of the segment size.  We need to do this so we can "promise" size
     * adjustments ahead of actually adding/freeing segments on disk, so that the "evict oldest segment" logic
     * can see the effect of recycling segments immediately (even though they're really happening asynchronously
     * on the manager thread, which will take a ms or two).
     */
    private final AtomicLong size = new AtomicLong();

    /**
     * New segment creation is initially disabled because we'll typically get some "free" segments
     * recycled after log replay.
     */
    volatile boolean createReserveSegments = false;

    // Background thread started by start(); runs the segment-management loop.
    private Thread managerThread;
    // Loop-control flag for the manager thread; cleared by shutdown().
    private volatile boolean run = true;
    // Owning commit log; provides the archiver, metrics and sync requests used below.
    private final CommitLog commitLog;

    /**
     * Creates a manager bound to the given commit log. Does not start the
     * background thread; callers must invoke {@link #start()} separately.
     */
    CommitLogSegmentManager(final CommitLog commitLog)
    {
        this.commitLog = commitLog;
    }

    /**
     * Starts the background manager thread. The loop alternates between draining
     * {@code segmentManagementTasks} and, when idle, eagerly creating a reserve
     * segment and flushing old column families if total commitlog size exceeds
     * the configured limit.
     */
    void start()
    {
        // The run loop for the manager thread
        Runnable runnable = new WrappedRunnable()
        {
            public void runMayThrow() throws Exception
            {
                while (run)
                {
                    try
                    {
                        Runnable task = segmentManagementTasks.poll();
                        if (task == null)
                        {
                            // if we have no more work to do, check if we should create a new segment
                            if (availableSegments.isEmpty() && (activeSegments.isEmpty() || createReserveSegments))
                            {
                                logger.trace("No segments in reserve; creating a fresh one");
                                // TODO : some error handling in case we fail to create a new segment
                                availableSegments.add(CommitLogSegment.createSegment(commitLog));
                                hasAvailableSegments.signalAll();
                            }

                            // flush old Cfs if we're full
                            long unused = unusedCapacity();
                            if (unused < 0)
                            {
                                // Collect the oldest active segments (everything before the one we are
                                // writing to) until reclaiming them would bring us back under the limit.
                                List<CommitLogSegment> segmentsToRecycle = new ArrayList<>();
                                long spaceToReclaim = 0;
                                for (CommitLogSegment segment : activeSegments)
                                {
                                    if (segment == allocatingFrom)
                                        break;
                                    segmentsToRecycle.add(segment);
                                    spaceToReclaim += DatabaseDescriptor.getCommitLogSegmentSize();
                                    if (spaceToReclaim + unused >= 0)
                                        break;
                                }
                                flushDataFrom(segmentsToRecycle, false);
                            }

                            try
                            {
                                // wait for new work to be provided
                                task = segmentManagementTasks.take();
                            }
                            catch (InterruptedException e)
                            {
                                // The manager thread is never deliberately interrupted.
                                throw new AssertionError();
                            }
                        }

                        task.run();
                    }
                    catch (Throwable t)
                    {
                        JVMStabilityInspector.inspectThrowable(t);
                        if (!CommitLog.handleCommitError("Failed managing commit log segments", t))
                            return;
                        // sleep some arbitrary period to avoid spamming CL
                        Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
                    }
                }
            }
        };

        run = true;

        managerThread = new Thread(runnable, "COMMIT-LOG-ALLOCATOR");
        managerThread.start();
    }

    /**
     * Reserve space in the current segment for the provided mutation or, if there isn't space available,
     * create a new segment.
     *
     * @param mutation mutation to reserve commit log space for
     * @param size     number of bytes to reserve
     * @return an Allocation referencing the reserved space in some segment
     */
    public Allocation allocate(Mutation mutation, int size)
    {
        CommitLogSegment segment = allocatingFrom();

        Allocation alloc;
        while ( null == (alloc = segment.allocate(mutation, size)) )
        {
            // failed to allocate, so move to a new segment with enough room
            advanceAllocatingFrom(segment);
            segment = allocatingFrom;
        }

        return alloc;
    }

    // simple wrapper to ensure non-null value for allocatingFrom; only necessary on first call
    CommitLogSegment allocatingFrom()
    {
        CommitLogSegment r = allocatingFrom;
        if (r == null)
        {
            advanceAllocatingFrom(null);
            r = allocatingFrom;
        }
        return r;
    }

    /**
     * Fetches a new segment from the queue, creating a new one if necessary, and activates it.
     * Blocks (via {@code hasAvailableSegments}) until the manager thread has published a segment
     * if none is immediately available.
     *
     * @param old the segment the caller last saw as {@code allocatingFrom}; used to detect
     *            whether another thread has already performed the switch
     */
    private void advanceAllocatingFrom(CommitLogSegment old)
    {
        while (true)
        {
            CommitLogSegment next;
            synchronized (this)
            {
                // do this in a critical section so we can atomically remove from availableSegments and add to allocatingFrom/activeSegments
                // see https://issues.apache.org/jira/browse/CASSANDRA-6557?focusedCommentId=13874432&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-13874432
                if (allocatingFrom != old)
                    return;
                next = availableSegments.poll();
                if (next != null)
                {
                    allocatingFrom = next;
                    activeSegments.add(next);
                }
            }

            if (next != null)
            {
                if (old != null)
                {
                    // Now we can run the user defined command just after switching to the new commit log.
                    // (Do this here instead of in the recycle call so we can get a head start on the archive.)
                    commitLog.archiver.maybeArchive(old);

                    // ensure we don't continue to use the old file; not strictly necessary, but cleaner to enforce it
                    old.discardUnusedTail();
                }

                // request that the CL be synced out-of-band, as we've finished a segment
                commitLog.requestExtraSync();
                return;
            }

            // no more segments, so register to receive a signal when not empty
            WaitQueue.Signal signal = hasAvailableSegments.register(commitLog.metrics.waitingOnSegmentAllocation.time());

            // trigger the management thread; this must occur after registering
            // the signal to ensure we are woken by any new segment creation
            wakeManager();

            // check if the queue has already been added to before waiting on the signal, to catch modifications
            // that happened prior to registering the signal; *then* check to see if we've been beaten to making the change
            if (!availableSegments.isEmpty() || allocatingFrom != old)
            {
                signal.cancel();
                // if we've been beaten, just stop immediately
                if (allocatingFrom != old)
                    return;
                // otherwise try again, as there should be an available segment
                continue;
            }

            // can only reach here if the queue hasn't been inserted into
            // before we registered the signal, as we only remove items from the queue
            // after updating allocatingFrom. Can safely block until we are signalled
            // by the allocator that new segments have been published
            signal.awaitUninterruptibly();
        }
    }

    private void wakeManager()
    {
        // put a NO-OP on the queue, to trigger management thread (and create a new segment if necessary)
        segmentManagementTasks.add(Runnables.doNothing());
    }

    /**
     * Switch to a new segment, regardless of how much is left in the current one.
     *
     * Flushes any dirty CFs for this segment and any older segments, and then recycles
     * the segments
     *
     * @param droppedCfs column family ids to forcibly mark clean in every active segment
     */
    void forceRecycleAll(Iterable<UUID> droppedCfs)
    {
        List<CommitLogSegment> segmentsToRecycle = new ArrayList<>(activeSegments);
        CommitLogSegment last = segmentsToRecycle.get(segmentsToRecycle.size() - 1);
        advanceAllocatingFrom(last);

        // wait for the commit log modifications
        last.waitForModifications();

        // make sure the writes have materialized inside of the memtables by waiting for all outstanding writes
        // on the relevant keyspaces to complete
        Set<Keyspace> keyspaces = new HashSet<>();
        for (UUID cfId : last.getDirtyCFIDs())
        {
            ColumnFamilyStore cfs = Schema.instance.getColumnFamilyStoreInstance(cfId);
            if (cfs != null)
                keyspaces.add(cfs.keyspace);
        }
        for (Keyspace keyspace : keyspaces)
            keyspace.writeOrder.awaitNewBarrier();

        // flush and wait for all CFs that are dirty in segments up-to and including 'last'
        Future<?> future = flushDataFrom(segmentsToRecycle, true);
        try
        {
            future.get();

            for (CommitLogSegment segment : activeSegments)
                for (UUID cfId : droppedCfs)
                    segment.markClean(cfId, segment.getContext());

            // now recycle segments that are unused, as we may not have triggered a discardCompletedSegments()
            // if the previous active segment was the only one to recycle (since an active segment isn't
            // necessarily dirty, and we only call dCS after a flush).
            for (CommitLogSegment segment : activeSegments)
                if (segment.isUnused())
                    recycleSegment(segment);

            CommitLogSegment first;
            if ((first = activeSegments.peek()) != null && first.id <= last.id)
                logger.error("Failed to force-recycle all segments; at least one segment is still in use with dirty CFs.");
        }
        catch (Throwable t)
        {
            // for now just log the error and return false, indicating that we failed
            logger.error("Failed waiting for a forced recycle of in-use commit log segments", t);
        }
    }

    /**
     * Indicates that a segment is no longer in use and that it should be recycled.
     *
     * @param segment segment that is no longer in use
     */
    void recycleSegment(final CommitLogSegment segment)
    {
        boolean archiveSuccess = commitLog.archiver.maybeWaitForArchiving(segment.getName());
        activeSegments.remove(segment);
        // if archiving (command) was not successful then leave the file alone. don't delete or recycle.
        discardSegment(segment, archiveSuccess);
    }

    /**
     * Differs from the above because it can work on any file instead of just existing
     * commit log segments managed by this manager.
     *
     * @param file segment file that is no longer in use.
     */
    void recycleSegment(final File file)
    {
        // (don't decrease managed size, since this was never a "live" segment)
        logger.trace("(Unopened) segment {} is no longer needed and will be deleted now", file);
        FileUtils.deleteWithConfirm(file);
    }

    /**
     * Indicates that a segment file should be deleted. The actual discard runs
     * asynchronously on the manager thread.
     *
     * @param segment    segment to be discarded
     * @param deleteFile whether the on-disk file should be deleted (false when the
     *                   archive script retains it)
     */
    private void discardSegment(final CommitLogSegment segment, final boolean deleteFile)
    {
        logger.trace("Segment {} is no longer active and will be deleted {}", segment, deleteFile ? "now" : "by the archive script");

        segmentManagementTasks.add(new Runnable()
        {
            public void run()
            {
                segment.discard(deleteFile);
            }
        });
    }

    /**
     * Adjust the tracked on-disk size. Called by individual segments to reflect writes, allocations and discards.
     * @param addedSize delta in bytes to apply to the tracked total (may be negative)
     */
    void addSize(long addedSize)
    {
        size.addAndGet(addedSize);
    }

    /**
     * @return the space (in bytes) used by all segment files.
     */
    public long onDiskSize()
    {
        return size.get();
    }

    /**
     * @return configured total commitlog space (bytes) minus the currently tracked
     *         size; negative when the log is over its limit.
     */
    private long unusedCapacity()
    {
        long total = DatabaseDescriptor.getTotalCommitlogSpaceInMB() * 1024 * 1024;
        long currentSize = size.get();
        logger.trace("Total active commitlog segment space used is {} out of {}", currentSize, total);
        return total - currentSize;
    }

    /**
     * @param name the filename to check
     * @return true if file is managed by this manager.
     */
    public boolean manages(String name)
    {
        for (CommitLogSegment segment : Iterables.concat(activeSegments, availableSegments))
            if (segment.getName().equals(name))
                return true;
        return false;
    }

    /**
     * Throws a flag that enables the behavior of keeping at least one spare segment
     * available at all times.
     */
    void enableReserveSegmentCreation()
    {
        createReserveSegments = true;
        wakeManager();
    }

    /**
     * Force a flush on all CFs that are still dirty in the given segments.
     *
     * @param segments segments whose dirty column families should be flushed
     * @param force    when true flush unconditionally; otherwise flush only up to
     *                 the replay position of the last segment
     * @return a Future that will finish when all the flushes are complete.
     */
    private Future<?> flushDataFrom(List<CommitLogSegment> segments, boolean force)
    {
        if (segments.isEmpty())
            return Futures.immediateFuture(null);
        final ReplayPosition maxReplayPosition = segments.get(segments.size() - 1).getContext();

        // a map of CfId -> forceFlush() to ensure we only queue one flush per cf
        final Map<UUID, ListenableFuture<?>> flushes = new LinkedHashMap<>();

        for (CommitLogSegment segment : segments)
        {
            for (UUID dirtyCFId : segment.getDirtyCFIDs())
            {
                Pair<String,String> pair = Schema.instance.getCF(dirtyCFId);
                if (pair == null)
                {
                    // even though we remove the schema entry before a final flush when dropping a CF,
                    // it's still possible for a writer to race and finish his append after the flush.
                    logger.trace("Marking clean CF {} that doesn't exist anymore", dirtyCFId);
                    segment.markClean(dirtyCFId, segment.getContext());
                }
                else if (!flushes.containsKey(dirtyCFId))
                {
                    String keyspace = pair.left;
                    final ColumnFamilyStore cfs = Keyspace.open(keyspace).getColumnFamilyStore(dirtyCFId);
                    // can safely call forceFlush here as we will only ever block (briefly) for other attempts to flush,
                    // no deadlock possibility since switchLock removal
                    flushes.put(dirtyCFId, force ? cfs.forceFlush() : cfs.forceFlush(maxReplayPosition));
                }
            }
        }

        return Futures.allAsList(flushes.values());
    }

    /**
     * Stops CL, for testing purposes. DO NOT USE THIS OUTSIDE OF TESTS.
     * Only call this after the AbstractCommitLogService is shut down.
     *
     * @param deleteSegments whether to delete the segment files on disk as well
     */
    public void stopUnsafe(boolean deleteSegments)
    {
        logger.trace("CLSM closing and clearing existing commit log segments...");
        createReserveSegments = false;

        awaitManagementTasksCompletion();

        shutdown();
        try
        {
            awaitTermination();
        }
        catch (InterruptedException e)
        {
            throw new RuntimeException(e);
        }

        for (CommitLogSegment segment : activeSegments)
            closeAndDeleteSegmentUnsafe(segment, deleteSegments);
        activeSegments.clear();

        for (CommitLogSegment segment : availableSegments)
            closeAndDeleteSegmentUnsafe(segment, deleteSegments);
        availableSegments.clear();

        allocatingFrom = null;

        segmentManagementTasks.clear();

        size.set(0L);

        logger.trace("CLSM done with closing and clearing existing commit log segments.");
    }

    // Used by tests only. Busy-waits for the task queue to drain, then sleeps
    // briefly to let the in-flight task finish.
    void awaitManagementTasksCompletion()
    {
        while (!segmentManagementTasks.isEmpty())
            Thread.yield();
        // The last management task is not yet complete. Wait a while for it.
        Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
        // TODO: If this functionality is required by anything other than tests, signalling must be used to ensure
        // waiting completes correctly.
    }

    /**
     * Discards a segment, tolerating an already-missing file.
     *
     * @param segment segment to discard
     * @param delete  whether to delete the backing file
     */
    private static void closeAndDeleteSegmentUnsafe(CommitLogSegment segment, boolean delete)
    {
        try
        {
            segment.discard(delete);
        }
        catch (AssertionError ignored)
        {
            // segment file does not exist
        }
    }

    /**
     * Initiates the shutdown process for the management thread.
     */
    public void shutdown()
    {
        run = false;
        wakeManager();
    }

    /**
     * Returns when the management thread terminates.
     */
    public void awaitTermination() throws InterruptedException
    {
        managerThread.join();

        for (CommitLogSegment segment : activeSegments)
            segment.close();

        for (CommitLogSegment segment : availableSegments)
            segment.close();

        CompressedSegment.shutdown();
    }

    /**
     * @return a read-only collection of the active commit log segments
     */
    Collection<CommitLogSegment> getActiveSegments()
    {
        return Collections.unmodifiableCollection(activeSegments);
    }
}
package org.apache.maven.plugin.gpg; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.deployer.ArtifactDeployer; import org.apache.maven.artifact.deployer.ArtifactDeploymentException; import org.apache.maven.artifact.factory.ArtifactFactory; import org.apache.maven.artifact.metadata.ArtifactMetadata; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.artifact.repository.ArtifactRepositoryFactory; import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout; import org.apache.maven.model.Model; import org.apache.maven.model.Parent; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; import org.apache.maven.model.io.xpp3.MavenXpp3Writer; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugin.MojoFailureException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.Mojo; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; import org.apache.maven.project.MavenProjectHelper; import org.apache.maven.project.artifact.ProjectArtifactMetadata; import 
org.apache.maven.project.validation.ModelValidationResult;
import org.apache.maven.project.validation.ModelValidator;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.IOUtil;
import org.codehaus.plexus.util.ReaderFactory;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.WriterFactory;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.util.List;
import java.util.Map;

/**
 * Signs artifacts and installs the artifact in the remote repository.
 *
 * @author Daniel Kulp
 * @since 1.0-beta-4
 */
@Mojo( name = "sign-and-deploy-file", requiresProject = false, threadSafe = true )
public class SignAndDeployFileMojo
    extends AbstractGpgMojo
{
    /**
     * The directory where to store signature files.
     */
    @Parameter( property = "gpg.ascDirectory" )
    private File ascDirectory;

    /**
     * Flag whether Maven is currently in online/offline mode.
     */
    @Parameter( defaultValue = "${settings.offline}", readonly = true )
    private boolean offline;

    /**
     * GroupId of the artifact to be deployed. Retrieved from POM file if specified.
     */
    @Parameter( property = "groupId" )
    private String groupId;

    /**
     * ArtifactId of the artifact to be deployed. Retrieved from POM file if specified.
     */
    @Parameter( property = "artifactId" )
    private String artifactId;

    /**
     * Version of the artifact to be deployed. Retrieved from POM file if specified.
     */
    @Parameter( property = "version" )
    private String version;

    /**
     * Type of the artifact to be deployed. Retrieved from POM file if specified.
     * Defaults to file extension if not specified via command line or POM.
     */
    @Parameter( property = "packaging" )
    private String packaging;

    /**
     * Add classifier to the artifact
     */
    @Parameter( property = "classifier" )
    private String classifier;

    /**
     * Description passed to a generated POM file (in case of generatePom=true).
     */
    @Parameter( property = "generatePom.description" )
    private String description;

    /**
     * File to be deployed.
     */
    @Parameter( property = "file", required = true )
    private File file;

    /**
     * Location of an existing POM file to be deployed alongside the main artifact, given by the ${file} parameter.
     */
    @Parameter( property = "pomFile" )
    private File pomFile;

    /**
     * Upload a POM for this artifact. Will generate a default POM if none is supplied with the pomFile argument.
     */
    @Parameter( property = "generatePom", defaultValue = "true" )
    private boolean generatePom;

    /**
     * Whether to deploy snapshots with a unique version or not.
     */
    @Parameter( property = "uniqueVersion", defaultValue = "true" )
    private boolean uniqueVersion;

    /**
     * URL where the artifact will be deployed. <br/>
     * ie ( file:///C:/m2-repo or scp://host.com/path/to/repo )
     */
    @Parameter( property = "url", required = true )
    private String url;

    /**
     * Server Id to map on the &lt;id&gt; under &lt;server&gt; section of <code>settings.xml</code>. In most cases, this
     * parameter will be required for authentication.
     */
    @Parameter( property = "repositoryId", defaultValue = "remote-repository", required = true )
    private String repositoryId;

    /**
     * The type of remote repository layout to deploy to. Try <i>legacy</i> for a Maven 1.x-style repository layout.
     */
    @Parameter( property = "repositoryLayout", defaultValue = "default" )
    private String repositoryLayout;

    /**
     * Component used to actually transfer artifacts to the remote repository.
     */
    @Component
    private ArtifactDeployer deployer;

    /**
     * The local repository, used as the staging side of the deployment.
     */
    @Parameter( defaultValue = "${localRepository}", required = true, readonly = true )
    private ArtifactRepository localRepository;

    /**
     * Map that contains the layouts.
     * NOTE(review): raw {@code Map} kept for Plexus role-based injection; values are
     * {@code ArtifactRepositoryLayout} instances keyed by layout id per the @Component role.
     */
    @Component( role = ArtifactRepositoryLayout.class )
    private Map repositoryLayouts;

    /**
     * Component used to create an artifact
     */
    @Component
    private ArtifactFactory artifactFactory;

    /**
     * Component used to create a repository
     */
    @Component
    private ArtifactRepositoryFactory repositoryFactory;

    /**
     * The component used to validate the user-supplied artifact coordinates.
     */
    @Component
    private ModelValidator modelValidator;

    /**
     * The default Maven project created when building the plugin
     *
     * @since 1.3
     */
    @Component
    private MavenProject project;

    /**
     * Used for attaching the source and javadoc jars to the project.
     *
     * @since 1.3
     */
    @Component
    private MavenProjectHelper projectHelper;

    /**
     * The bundled API docs for the artifact.
     *
     * @since 1.3
     */
    @Parameter( property = "javadoc" )
    private File javadoc;

    /**
     * The bundled sources for the artifact.
     *
     * @since 1.3
     */
    @Parameter( property = "sources" )
    private File sources;

    /**
     * Parameter used to control how many times a failed deployment will be retried before giving up and failing.
     * If a value outside the range 1-10 is specified it will be pulled to the nearest value within the range 1-10.
     *
     * @since 1.3
     */
    @Parameter( property = "retryFailedDeploymentCount", defaultValue = "1" )
    private int retryFailedDeploymentCount;

    /**
     * Parameter used to update the metadata to make the artifact as release.
     *
     * @since 1.3
     */
    @Parameter( property = "updateReleaseInfo", defaultValue = "false" )
    protected boolean updateReleaseInfo;

    /**
     * A comma separated list of types for each of the extra side artifacts to deploy. If there is a mis-match in
     * the number of entries in {@link #files} or {@link #classifiers}, then an error will be raised.
     */
    @Parameter( property = "types" )
    private String types;

    /**
     * A comma separated list of classifiers for each of the extra side artifacts to deploy. If there is a mis-match in
     * the number of entries in {@link #files} or {@link #types}, then an error will be raised.
     */
    @Parameter( property = "classifiers" )
    private String classifiers;

    /**
     * A comma separated list of files for each of the extra side artifacts to deploy. If there is a mis-match in
     * the number of entries in {@link #types} or {@link #classifiers}, then an error will be raised.
     */
    @Parameter( property = "files" )
    private String files;

    /**
     * Fills in missing coordinates from the supplied POM (disabling POM generation when one
     * was given) and defaults {@link #packaging} to the deployed file's extension.
     *
     * @throws MojoExecutionException if the supplied POM cannot be read or parsed
     */
    private void initProperties()
        throws MojoExecutionException
    {
        // Process the supplied POM (if there is one)
        if ( pomFile != null )
        {
            generatePom = false;

            Model model = readModel( pomFile );

            processModel( model );
        }

        if ( packaging == null && file != null )
        {
            packaging = FileUtils.getExtension( file.getName() );
        }
    }

    /**
     * Signs the main artifact (plus POM, sources, javadoc and any extra side artifacts)
     * and deploys everything to the configured remote repository.
     *
     * @throws MojoExecutionException on deployment or configuration errors
     * @throws MojoFailureException   on user-input problems (offline mode, missing file,
     *                                invalid layout/coordinates, mismatched side-artifact lists)
     */
    public void execute()
        throws MojoExecutionException, MojoFailureException
    {
        AbstractGpgSigner signer = newSigner( null );
        signer.setOutputDirectory( ascDirectory );
        signer.setBaseDirectory( new File( "" ).getAbsoluteFile() );

        if ( offline )
        {
            throw new MojoFailureException( "Cannot deploy artifacts when Maven is in offline mode" );
        }

        initProperties();

        validateArtifactInformation();

        if ( !file.exists() )
        {
            throw new MojoFailureException( file.getPath() + " not found." );
        }

        ArtifactRepositoryLayout layout = (ArtifactRepositoryLayout) repositoryLayouts.get( repositoryLayout );
        if ( layout == null )
        {
            throw new MojoFailureException( "Invalid repository layout: " + repositoryLayout );
        }

        ArtifactRepository deploymentRepository =
            repositoryFactory.createDeploymentArtifactRepository( repositoryId, url, layout, uniqueVersion );

        if ( StringUtils.isEmpty( deploymentRepository.getProtocol() ) )
        {
            throw new MojoFailureException( "No transfer protocol found." );
        }

        Artifact artifact =
            artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, packaging, classifier );

        if ( file.equals( getLocalRepoFile( artifact ) ) )
        {
            throw new MojoFailureException( "Cannot deploy artifact from the local repository: " + file );
        }

        // Sign the main artifact and attach the .asc as metadata.
        File fileSig = signer.generateSignatureForArtifact( file );
        ArtifactMetadata metadata = new AscArtifactMetadata( artifact, fileSig, false );
        artifact.addMetadata( metadata );

        if ( !"pom".equals( packaging ) )
        {
            if ( pomFile == null && generatePom )
            {
                pomFile = generatePomFile();
            }
            if ( pomFile != null )
            {
                // Attach the POM itself plus its detached signature.
                metadata = new ProjectArtifactMetadata( artifact, pomFile );
                artifact.addMetadata( metadata );

                fileSig = signer.generateSignatureForArtifact( pomFile );
                metadata = new AscArtifactMetadata( artifact, fileSig, true );
                artifact.addMetadata( metadata );
            }
        }

        if ( updateReleaseInfo )
        {
            artifact.setRelease( true );
        }

        project.setArtifact( artifact );

        try
        {
            deploy( file, artifact, deploymentRepository, localRepository );
        }
        catch ( ArtifactDeploymentException e )
        {
            throw new MojoExecutionException( e.getMessage(), e );
        }

        if ( sources != null )
        {
            projectHelper.attachArtifact( project, "jar", "sources", sources );
        }
        if ( javadoc != null )
        {
            projectHelper.attachArtifact( project, "jar", "javadoc", javadoc );
        }

        if ( files != null )
        {
            if ( types == null )
            {
                throw new MojoExecutionException( "You must specify 'types' if you specify 'files'" );
            }
            if ( classifiers == null )
            {
                throw new MojoExecutionException( "You must specify 'classifiers' if you specify 'files'" );
            }
            // countMatches counts commas, so each *Length is (number of entries - 1);
            // equality of the counts therefore means equal numbers of entries.
            int filesLength = StringUtils.countMatches( files, "," );
            int typesLength = StringUtils.countMatches( types, "," );
            int classifiersLength = StringUtils.countMatches( classifiers, "," );
            if ( typesLength != filesLength )
            {
                throw new MojoExecutionException( "You must specify the same number of entries in 'files' and "
                    + "'types' (respectively " + filesLength + " and " + typesLength + " entries )" );
            }
            if ( classifiersLength != filesLength )
            {
                throw new MojoExecutionException( "You must specify the same number of entries in 'files' and "
                    + "'classifiers' (respectively " + filesLength + " and " + classifiersLength + " entries )" );
            }
            // Walk the three comma-separated lists in lockstep; fi/ti/ci are the start
            // offsets of the current entry, nfi/nti/nci the positions of the next comma
            // (or end of string for the final entry).
            int fi = 0;
            int ti = 0;
            int ci = 0;
            for ( int i = 0; i <= filesLength; i++ )
            {
                int nfi = files.indexOf( ',', fi );
                if ( nfi == -1 )
                {
                    nfi = files.length();
                }
                int nti = types.indexOf( ',', ti );
                if ( nti == -1 )
                {
                    nti = types.length();
                }
                int nci = classifiers.indexOf( ',', ci );
                if ( nci == -1 )
                {
                    nci = classifiers.length();
                }
                File file = new File( files.substring( fi, nfi ) );
                if ( !file.isFile() )
                {
                    // try relative to the project basedir just in case
                    file = new File( project.getBasedir(), files.substring( fi, nfi ) );
                }
                if ( file.isFile() )
                {
                    if ( StringUtils.isWhitespace( classifiers.substring( ci, nci ) ) )
                    {
                        // blank classifier entry: attach by type only
                        projectHelper.attachArtifact( project, types.substring( ti, nti ).trim(), file );
                    }
                    else
                    {
                        projectHelper.attachArtifact( project, types.substring( ti, nti ).trim(),
                                                      classifiers.substring( ci, nci ).trim(), file );
                    }
                }
                else
                {
                    throw new MojoExecutionException( "Specified side artifact " + file + " does not exist" );
                }
                fi = nfi + 1;
                ti = nti + 1;
                ci = nci + 1;
            }
        }
        else
        {
            if ( types != null )
            {
                throw new MojoExecutionException( "You must specify 'files' if you specify 'types'" );
            }
            if ( classifiers != null )
            {
                throw new MojoExecutionException( "You must specify 'files' if you specify 'classifiers'" );
            }
        }

        // Sign and deploy every artifact attached above (sources, javadoc, side artifacts).
        List attachedArtifacts = project.getAttachedArtifacts();

        for ( Object attachedArtifact : attachedArtifacts )
        {
            Artifact attached = (Artifact) attachedArtifact;
            fileSig = signer.generateSignatureForArtifact( attached.getFile() );
            attached = new AttachedSignedArtifact( attached, new AscArtifactMetadata( attached, fileSig, false ) );
            try
            {
                deploy( attached.getFile(), attached, deploymentRepository, localRepository );
            }
            catch ( ArtifactDeploymentException e )
            {
                throw new MojoExecutionException(
                    "Error deploying attached artifact " + attached.getFile() + ": " + e.getMessage(), e );
            }
        }
    }

    /**
     * Gets the path of the specified artifact within the local repository. Note that the returned path need not exist
     * (yet).
     *
     * @param artifact The artifact whose local repo path should be determined, must not be <code>null</code>.
     * @return The absolute path to the artifact when installed, never <code>null</code>.
     */
    private File getLocalRepoFile( Artifact artifact )
    {
        String path = localRepository.pathOf( artifact );
        return new File( localRepository.getBasedir(), path );
    }

    /**
     * Process the supplied pomFile to get groupId, artifactId, version, and packaging
     * (falling back to the parent for groupId/version; classifier is never read from the POM).
     *
     * @param model The POM to extract missing artifact coordinates from, must not be <code>null</code>.
     */
    private void processModel( Model model )
    {
        Parent parent = model.getParent();

        if ( this.groupId == null )
        {
            this.groupId = model.getGroupId();
            if ( this.groupId == null && parent != null )
            {
                this.groupId = parent.getGroupId();
            }
        }
        if ( this.artifactId == null )
        {
            this.artifactId = model.getArtifactId();
        }
        if ( this.version == null )
        {
            this.version = model.getVersion();
            if ( this.version == null && parent != null )
            {
                this.version = parent.getVersion();
            }
        }
        if ( this.packaging == null )
        {
            this.packaging = model.getPackaging();
        }
    }

    /**
     * Extract the model from the specified POM file.
     *
     * @param pomFile The path of the POM file to parse, must not be <code>null</code>.
     * @return The model from the POM file, never <code>null</code>.
     * @throws MojoExecutionException If the file doesn't exist or cannot be read.
     */
    private Model readModel( File pomFile )
        throws MojoExecutionException
    {
        Reader reader = null;
        try
        {
            reader = ReaderFactory.newXmlReader( pomFile );
            return new MavenXpp3Reader().read( reader );
        }
        catch ( FileNotFoundException e )
        {
            throw new MojoExecutionException( "POM not found " + pomFile, e );
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Error reading POM " + pomFile, e );
        }
        catch ( XmlPullParserException e )
        {
            throw new MojoExecutionException( "Error parsing POM " + pomFile, e );
        }
        finally
        {
            IOUtil.close( reader );
        }
    }

    /**
     * Generates a minimal POM from the user-supplied artifact information.
     *
     * @return The path to the generated POM file, never <code>null</code>.
     * @throws MojoExecutionException If the generation failed.
     */
    private File generatePomFile()
        throws MojoExecutionException
    {
        Model model = generateModel();

        Writer fw = null;
        try
        {
            // deleteOnExit: the temp POM only needs to live for this build
            File tempFile = File.createTempFile( "mvndeploy", ".pom" );
            tempFile.deleteOnExit();

            fw = WriterFactory.newXmlWriter( tempFile );
            new MavenXpp3Writer().write( fw, model );

            return tempFile;
        }
        catch ( IOException e )
        {
            throw new MojoExecutionException( "Error writing temporary pom file: " + e.getMessage(), e );
        }
        finally
        {
            IOUtil.close( fw );
        }
    }

    /**
     * Validates the user-supplied artifact information.
     *
     * @throws MojoFailureException If any artifact coordinate is invalid.
     */
    private void validateArtifactInformation()
        throws MojoFailureException
    {
        Model model = generateModel();

        ModelValidationResult result = modelValidator.validate( model );

        if ( result.getMessageCount() > 0 )
        {
            throw new MojoFailureException( "The artifact information is incomplete or not valid:\n"
                + result.render( " " ) );
        }
    }

    /**
     * Generates a minimal model from the user-supplied artifact information.
     *
     * @return The generated model, never <code>null</code>.
     */
    private Model generateModel()
    {
        Model model = new Model();

        model.setModelVersion( "4.0.0" );

        model.setGroupId( groupId );
        model.setArtifactId( artifactId );
        model.setVersion( version );
        model.setPackaging( packaging );

        model.setDescription( description );

        return model;
    }

    /**
     * Deploy an artifact from a particular file, retrying on failure up to
     * {@link #retryFailedDeploymentCount} times (clamped to 1-10).
     *
     * @param source the file to deploy
     * @param artifact the artifact definition
     * @param deploymentRepository the repository to deploy to
     * @param localRepository the local repository to install into
     * @throws ArtifactDeploymentException if an error occurred deploying the artifact
     *         (the first failure is rethrown after all retries are exhausted)
     */
    protected void deploy( File source, Artifact artifact, ArtifactRepository deploymentRepository,
                           ArtifactRepository localRepository )
        throws ArtifactDeploymentException
    {
        // clamp the configured retry count into [1, 10]
        int retryFailedDeploymentCount = Math.max( 1, Math.min( 10, this.retryFailedDeploymentCount ) );
        ArtifactDeploymentException exception = null;
        for ( int count = 0; count < retryFailedDeploymentCount; count++ )
        {
            try
            {
                if ( count > 0 )
                {
                    getLog().info( "Retrying deployment attempt " + ( count + 1 ) + " of "
                        + retryFailedDeploymentCount );
                }
                deployer.deploy( source, artifact, deploymentRepository, localRepository );
                for ( Object o : artifact.getMetadataList() )
                {
                    ArtifactMetadata metadata = (ArtifactMetadata) o;
                    getLog().info( "Metadata[" + metadata.getKey() + "].filename = " + metadata.getRemoteFilename() );
                }
                // success: clear any earlier failure and stop retrying
                exception = null;
                break;
            }
            catch ( ArtifactDeploymentException e )
            {
                if ( count + 1 < retryFailedDeploymentCount )
                {
                    getLog().warn( "Encountered issue during deployment: " + e.getLocalizedMessage() );
                    getLog().debug( e );
                }
                // remember only the first failure for the eventual rethrow
                if ( exception == null )
                {
                    exception = e;
                }
            }
        }
        if ( exception != null )
        {
            throw exception;
        }
    }
}
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/gkehub/v1alpha/configmanagement/configmanagement.proto
// NOTE(review): hand edits to this file will be lost on regeneration —
// change the .proto definition and re-run protoc instead.

package com.google.cloud.gkehub.configmanagement.v1alpha;

/**
 *
 *
 * <pre>
 * State for Binauthz
 * </pre>
 *
 * Protobuf type {@code google.cloud.gkehub.configmanagement.v1alpha.BinauthzState}
 */
public final class BinauthzState extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.gkehub.configmanagement.v1alpha.BinauthzState)
    BinauthzStateOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use BinauthzState.newBuilder() to construct.
  private BinauthzState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: enum field starts at numeric value 0 (UNSPECIFIED).
  private BinauthzState() {
    webhook_ = 0;
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BinauthzState();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tags until EOF (tag 0),
  // preserving unrecognized fields in unknownFields.
  private BinauthzState(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8:
            {
              int rawValue = input.readEnum();

              webhook_ = rawValue;
              break;
            }
          case 18:
            {
              // Message field: merge into any previously-parsed value so
              // repeated occurrences on the wire accumulate correctly.
              com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.Builder subBuilder =
                  null;
              if (version_ != null) {
                subBuilder = version_.toBuilder();
              }
              version_ =
                  input.readMessage(
                      com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.parser(),
                      extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(version_);
                version_ = subBuilder.buildPartial();
              }

              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
        .internal_static_google_cloud_gkehub_configmanagement_v1alpha_BinauthzState_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
        .internal_static_google_cloud_gkehub_configmanagement_v1alpha_BinauthzState_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState.class,
            com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState.Builder.class);
  }

  public static final int WEBHOOK_FIELD_NUMBER = 1;
  private int webhook_;
  /**
   *
   *
   * <pre>
   * The state of the binauthz webhook.
   * </pre>
   *
   * <code>.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState webhook = 1;</code>
   *
   * @return The enum numeric value on the wire for webhook.
   */
  @java.lang.Override
  public int getWebhookValue() {
    return webhook_;
  }
  /**
   *
   *
   * <pre>
   * The state of the binauthz webhook.
   * </pre>
   *
   * <code>.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState webhook = 1;</code>
   *
   * @return The webhook.
   */
  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState getWebhook() {
    @SuppressWarnings("deprecation")
    com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState result =
        com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState.valueOf(webhook_);
    // Unknown wire values map to UNRECOGNIZED rather than null.
    return result == null
        ? com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState.UNRECOGNIZED
        : result;
  }

  public static final int VERSION_FIELD_NUMBER = 2;
  private com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version_;
  /**
   *
   *
   * <pre>
   * The version of binauthz that is installed.
   * </pre>
   *
   * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
   *
   * @return Whether the version field is set.
   */
  @java.lang.Override
  public boolean hasVersion() {
    return version_ != null;
  }
  /**
   *
   *
   * <pre>
   * The version of binauthz that is installed.
   * </pre>
   *
   * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
   *
   * @return The version.
   */
  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion getVersion() {
    return version_ == null
        ? com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.getDefaultInstance()
        : version_;
  }
  /**
   *
   *
   * <pre>
   * The version of binauthz that is installed.
   * </pre>
   *
   * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersionOrBuilder
      getVersionOrBuilder() {
    return getVersion();
  }

  // -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields equal to their proto3 defaults are omitted from the wire.
    if (webhook_
        != com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState
            .DEPLOYMENT_STATE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(1, webhook_);
    }
    if (version_ != null) {
      output.writeMessage(2, getVersion());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (webhook_
        != com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState
            .DEPLOYMENT_STATE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, webhook_);
    }
    if (version_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getVersion());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState)) {
      return super.equals(obj);
    }
    com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState other =
        (com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState) obj;

    if (webhook_ != other.webhook_) return false;
    if (hasVersion() != other.hasVersion()) return false;
    if (hasVersion()) {
      if (!getVersion().equals(other.getVersion())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + WEBHOOK_FIELD_NUMBER;
    hash = (53 * hash) + webhook_;
    if (hasVersion()) {
      hash = (37 * hash) + VERSION_FIELD_NUMBER;
      hash = (53 * hash) + getVersion().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * State for Binauthz
   * </pre>
   *
   * Protobuf type {@code google.cloud.gkehub.configmanagement.v1alpha.BinauthzState}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.gkehub.configmanagement.v1alpha.BinauthzState)
      com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzStateOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
          .internal_static_google_cloud_gkehub_configmanagement_v1alpha_BinauthzState_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
          .internal_static_google_cloud_gkehub_configmanagement_v1alpha_BinauthzState_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState.class,
              com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState.Builder.class);
    }

    // Construct using com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      webhook_ = 0;

      if (versionBuilder_ == null) {
        version_ = null;
      } else {
        version_ = null;
        versionBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
          .internal_static_google_cloud_gkehub_configmanagement_v1alpha_BinauthzState_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState
        getDefaultInstanceForType() {
      return com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState build() {
      com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState buildPartial() {
      com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState result =
          new com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState(this);
      result.webhook_ = webhook_;
      if (versionBuilder_ == null) {
        result.version_ = version_;
      } else {
        result.version_ = versionBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState) {
        return mergeFrom((com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState other) {
      if (other
          == com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState.getDefaultInstance())
        return this;
      if (other.webhook_ != 0) {
        setWebhookValue(other.getWebhookValue());
      }
      if (other.hasVersion()) {
        mergeVersion(other.getVersion());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        // Merge whatever was parsed before a failure, matching protobuf
        // partial-parse semantics.
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int webhook_ = 0;
    /**
     *
     *
     * <pre>
     * The state of the binauthz webhook.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState webhook = 1;</code>
     *
     * @return The enum numeric value on the wire for webhook.
     */
    @java.lang.Override
    public int getWebhookValue() {
      return webhook_;
    }
    /**
     *
     *
     * <pre>
     * The state of the binauthz webhook.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState webhook = 1;</code>
     *
     * @param value The enum numeric value on the wire for webhook to set.
     * @return This builder for chaining.
     */
    public Builder setWebhookValue(int value) {

      webhook_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The state of the binauthz webhook.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState webhook = 1;</code>
     *
     * @return The webhook.
     */
    @java.lang.Override
    public com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState getWebhook() {
      @SuppressWarnings("deprecation")
      com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState result =
          com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState.valueOf(webhook_);
      return result == null
          ? com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * The state of the binauthz webhook.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState webhook = 1;</code>
     *
     * @param value The webhook to set.
     * @return This builder for chaining.
     */
    public Builder setWebhook(
        com.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState value) {
      if (value == null) {
        throw new NullPointerException();
      }

      webhook_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The state of the binauthz webhook.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.DeploymentState webhook = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearWebhook() {

      webhook_ = 0;
      onChanged();
      return this;
    }

    private com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion,
            com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.Builder,
            com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersionOrBuilder>
        versionBuilder_;
    /**
     *
     *
     * <pre>
     * The version of binauthz that is installed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
     *
     * @return Whether the version field is set.
     */
    public boolean hasVersion() {
      return versionBuilder_ != null || version_ != null;
    }
    /**
     *
     *
     * <pre>
     * The version of binauthz that is installed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
     *
     * @return The version.
     */
    public com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion getVersion() {
      if (versionBuilder_ == null) {
        return version_ == null
            ? com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.getDefaultInstance()
            : version_;
      } else {
        return versionBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The version of binauthz that is installed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
     */
    public Builder setVersion(
        com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion value) {
      if (versionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        version_ = value;
        onChanged();
      } else {
        versionBuilder_.setMessage(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * The version of binauthz that is installed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
     */
    public Builder setVersion(
        com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.Builder builderForValue) {
      if (versionBuilder_ == null) {
        version_ = builderForValue.build();
        onChanged();
      } else {
        versionBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * The version of binauthz that is installed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
     */
    public Builder mergeVersion(
        com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion value) {
      if (versionBuilder_ == null) {
        if (version_ != null) {
          version_ =
              com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.newBuilder(version_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          version_ = value;
        }
        onChanged();
      } else {
        versionBuilder_.mergeFrom(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * The version of binauthz that is installed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
     */
    public Builder clearVersion() {
      if (versionBuilder_ == null) {
        version_ = null;
        onChanged();
      } else {
        version_ = null;
        versionBuilder_ = null;
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * The version of binauthz that is installed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
     */
    public com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.Builder
        getVersionBuilder() {

      onChanged();
      return getVersionFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The version of binauthz that is installed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
     */
    public com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersionOrBuilder
        getVersionOrBuilder() {
      if (versionBuilder_ != null) {
        return versionBuilder_.getMessageOrBuilder();
      } else {
        return version_ == null
            ? com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.getDefaultInstance()
            : version_;
      }
    }
    /**
     *
     *
     * <pre>
     * The version of binauthz that is installed.
     * </pre>
     *
     * <code>.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion version = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion,
            com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.Builder,
            com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersionOrBuilder>
        getVersionFieldBuilder() {
      if (versionBuilder_ == null) {
        versionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion,
                com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersion.Builder,
                com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzVersionOrBuilder>(
                getVersion(), getParentForChildren(), isClean());
        version_ = null;
      }
      return versionBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.gkehub.configmanagement.v1alpha.BinauthzState)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.gkehub.configmanagement.v1alpha.BinauthzState)
  private static final com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState();
  }

  public static com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<BinauthzState> PARSER =
      new com.google.protobuf.AbstractParser<BinauthzState>() {
        @java.lang.Override
        public BinauthzState parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new BinauthzState(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<BinauthzState> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<BinauthzState> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1alpha.BinauthzState
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* * Copyright (C) 2014, United States Government, as represented by the * Administrator of the National Aeronautics and Space Administration. * All rights reserved. * * The Java Pathfinder core (jpf-core) platform is licensed under the * Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gov.nasa.jpf.vm; import gov.nasa.jpf.Config; import gov.nasa.jpf.JPF; import gov.nasa.jpf.JPFException; import gov.nasa.jpf.annotation.MJI; import gov.nasa.jpf.util.IntTable; import gov.nasa.jpf.util.JPFLogger; import gov.nasa.jpf.util.ObjectConverter; import gov.nasa.jpf.util.ObjectList; import gov.nasa.jpf.util.RunListener; import gov.nasa.jpf.util.RunRegistry; import gov.nasa.jpf.util.json.CGCall; import gov.nasa.jpf.util.json.JSONLexer; import gov.nasa.jpf.util.json.JSONObject; import gov.nasa.jpf.util.json.JSONParser; import gov.nasa.jpf.vm.choice.DoubleChoiceFromList; import gov.nasa.jpf.vm.choice.FloatChoiceFromList; import gov.nasa.jpf.vm.choice.IntChoiceFromSet; import gov.nasa.jpf.vm.choice.IntIntervalGenerator; import gov.nasa.jpf.vm.choice.LongChoiceFromList; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.ObjectInputStream; import java.io.PrintStream; import java.util.BitSet; import java.util.List; /** * native peer class for programmatic JPF interface (that can be used inside * of apps to verify - if you are aware of the danger that comes with it) * * this peer is a bit different in that it only uses static fields and methods because 
* its use is supposed to be JPF global (without classloader namespaces) */ public class JPF_gov_nasa_jpf_vm_Verify extends NativePeer { static final int MAX_COUNTERS = 127; static boolean isInitialized; // those are used to store search global int values (e.g. from TestJPF derived classes) static int[] counter; static IntTable<String> map; public static int heuristicSearchValue; static boolean supportIgnorePath; static boolean breakSingleChoice; static boolean enableAtomic; static Config config; // we need to keep this around for CG creation // our const ChoiceGenerator ctor argtypes static Class[] cgArgTypes = { Config.class, String.class }; // this is our cache for ChoiceGenerator ctor parameters static Object[] cgArgs = { null, null }; static BitSet[] bitSets; static int nextBitSet; static PrintStream out; public static boolean init (Config conf) { if (!isInitialized){ supportIgnorePath = conf.getBoolean("vm.verify.ignore_path"); breakSingleChoice = conf.getBoolean("cg.break_single_choice"); enableAtomic = conf.getBoolean("cg.enable_atomic", true); heuristicSearchValue = conf.getInt("search.heuristic.default_value"); counter = null; map = null; config = conf; String outFile = conf.getString("vm.verify.output_file"); if (outFile != null){ try { out = new PrintStream(outFile); } catch (FileNotFoundException fnx){ System.err.println("error: could not open verify output file " + outFile + ", using System.out"); out = System.out; } } else { out = System.out; } Verify.setPeerClass( JPF_gov_nasa_jpf_vm_Verify.class); RunRegistry.getDefaultRegistry().addListener( new RunListener() { @Override public void reset (RunRegistry reg){ isInitialized = false; } }); } return true; } public static final int NO_VALUE = -1; @MJI public static int getValue__Ljava_lang_String_2__I (MJIEnv env, int clsObjRef, int keyRef) { if (map == null) { return NO_VALUE; } else { String key = env.getStringObject(keyRef); IntTable.Entry<String> e = map.get(key); if (e != null) { return e.val; } 
else { return NO_VALUE; } } } @MJI public static void putValue__Ljava_lang_String_2I__V (MJIEnv env, int clsObjRef, int keyRef, int val) { if (map == null) { map = new IntTable<String>(); } String key = env.getStringObject(keyRef); map.put(key, val); } @MJI public static int getCounter__I__I (MJIEnv env, int clsObjRef, int counterId) { if ((counter == null) || (counterId < 0) || (counterId >= counter.length)) { return 0; } return counter[counterId]; } private static void ensureCounterCapacity (int counterId){ if (counter == null) { counter = new int[(counterId >= MAX_COUNTERS) ? counterId+1 : MAX_COUNTERS]; } else if (counterId >= counter.length) { int[] newCounter = new int[counterId+1]; System.arraycopy(counter, 0, newCounter, 0, counter.length); counter = newCounter; } } @MJI public static void resetCounter__I__V (MJIEnv env, int clsObjRef, int counterId) { if ((counter == null) || (counterId < 0) || (counterId >= counter.length)) { return; } counter[counterId] = 0; } @MJI public static void setCounter__II__V (MJIEnv env, int clsObjRef, int counterId, int val) { if (counterId < 0){ return; } ensureCounterCapacity(counterId); counter[counterId] = val; } @MJI public static int incrementCounter__I__I (MJIEnv env, int clsObjRef, int counterId) { if (counterId < 0) { return 0; } ensureCounterCapacity(counterId); return ++counter[counterId]; } private static void checkBitSetId(int id) { if (bitSets == null) { bitSets = new BitSet[id + 1]; } else if (id >= bitSets.length) { BitSet[] newBitSets = new BitSet[id + 1]; System.arraycopy(bitSets, 0, newBitSets, 0, bitSets.length); bitSets = newBitSets; } if (bitSets[id] == null) { bitSets[id] = new BitSet(); } } @MJI public static void setBitInBitSet__IIZ__V(MJIEnv env, int clsObjRef, int id, int bitNum, boolean value) { checkBitSetId(id); bitSets[id].set(bitNum, value); } @MJI public static boolean getBitInBitSet__II__Z(MJIEnv env, int clsObjRef, int id, int bitNum) { checkBitSetId(id); return bitSets[id].get(bitNum); } 
  /** Returns the host VM's wall-clock time (not model time). */
  @MJI
  public static long currentTimeMillis____J (MJIEnv env, int clsObjRef) {
    return System.currentTimeMillis();
  }

  // NOTE(review): parameter order (objRef first, env second) differs from every
  // other peer method here - confirm this matches the caller's expectation.
  @MJI
  public static String getType (int objRef, MJIEnv env) {
    return Types.getTypeName(env.getElementInfo(objRef).getType());
  }

  /** Attaches a comment string to the current transition (trail annotation). */
  @MJI
  public static void addComment__Ljava_lang_String_2__V (MJIEnv env, int clsObjRef, int stringRef) {
    SystemState ss = env.getSystemState();
    String cmt = env.getStringObject(stringRef);

    ss.getTrail().setAnnotation(cmt);
  }

  /** Raises a model-level AssertionError if the condition is false. */
  @MJI
  public static void assertTrue__Z__V (MJIEnv env, int clsObjRef, boolean b) {
    if (!b) {
      env.throwException("java.lang.AssertionError", "assertTrue failed");
    }
  }

  // those are evil - use with extreme care. If something blocks inside of
  // an atomic section we have to raise an exception

  /**
   * Enters an atomic section. May re-execute the calling instruction
   * (repeatInvocation) if the scheduler needs to set a CG first.
   */
  @MJI
  public static void beginAtomic____V (MJIEnv env, int clsObjRef) {
    if (enableAtomic){
      ThreadInfo tiAtomic = env.getThreadInfo();
      if (tiAtomic.getScheduler().setsBeginAtomicCG(tiAtomic)){
        env.repeatInvocation();
        return;
      }
      env.getSystemState().incAtomic();
    }
  }

  /** Leaves an atomic section; only decrements on the first execution pass. */
  @MJI
  public static void endAtomic____V (MJIEnv env, int clsObjRef) {
    if (enableAtomic){
      ThreadInfo tiAtomic = env.getThreadInfo();

      if (!tiAtomic.isFirstStepInsn()){
        env.getSystemState().decAtomic();
      }

      if (tiAtomic.getScheduler().setsEndAtomicCG(tiAtomic)){
        env.repeatInvocation();
        return;
      }
    }
  }

  @MJI
  public static void busyWait__J__V (MJIEnv env, int clsObjRef, long duration) {
    // nothing required here (we systematically explore scheduling
    // sequences anyway), but we need to intercept the call
  }

  /** Marks the current path as ignored (pruned) if vm.verify.ignore_path is set. */
  @MJI
  public static void ignoreIf__Z__V (MJIEnv env, int clsObjRef, boolean cond) {
    if (supportIgnorePath) {
      env.getSystemState().setIgnored(cond);
    }
  }

  /** Marks the current state as "interesting" for the search. */
  @MJI
  public static void interesting__Z__V (MJIEnv env, int clsObjRef, boolean cond) {
    env.getSystemState().setInteresting(cond);
  }

  /** Forces a transition break in the current thread, tagged with a reason. */
  @MJI
  public static void breakTransition__Ljava_lang_String_2__V (MJIEnv env, int clsObjRef, int reasonRef){
    ThreadInfo ti = env.getThreadInfo();
    String reason = env.getStringObject(reasonRef);
    ti.breakTransition(reason);
  }

  /**
   * mostly for debugging purposes - this does not optimize away single choice CGs
   */
  // NOTE(review): this is the only non-static peer method in the class - confirm
  // the MJI dispatcher handles instance peer methods here as intended.
  @MJI
  public int breakTransition__Ljava_lang_String_2II__I (MJIEnv env, int clsObjRef, int reasonRef, int min, int max) {
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();
    String reason = env.getStringObject(reasonRef);

    if (!ti.isFirstStepInsn()) { // first time around
      IntChoiceGenerator cg = new IntIntervalGenerator( reason, min,max);
      if (ss.setNextChoiceGenerator(cg)){
        env.repeatInvocation();
      }
      return -1;
    } else {
      return getNextChoice(ss, reason, IntChoiceGenerator.class, Integer.class);
    }
  }

  /** True if the direct caller's class is (a subtype of) the named class. */
  @MJI
  public static boolean isCalledFromClass__Ljava_lang_String_2__Z (MJIEnv env, int clsObjRef, int clsNameRef) {
    String refClassName = env.getStringObject(clsNameRef);
    ThreadInfo ti = env.getThreadInfo();
    StackFrame caller = ti.getLastInvokedStackFrame();

    if (caller != null){
      ClassInfo ci = caller.getClassInfo();
      return ci.isInstanceOf(refClassName);
    }
    return false;
  }

  // instantiate a CG of the config-declared class "<id>.class", passing (config,id)
  static <T extends ChoiceGenerator<?>> T createChoiceGenerator (Class<T> cgClass, SystemState ss, String id) {
    T gen = null;

    cgArgs[0] = config;
    cgArgs[1] = id; // good thing we are not multithreaded (other fields are const)

    String key = id + ".class";
    gen = config.getEssentialInstance(key, cgClass, cgArgTypes, cgArgs);
    return gen;
  }

  /**
   * "Top half" of the two-pass CG protocol: registers the CG and schedules
   * re-execution. Single-choice CGs are short-circuited (unless
   * cg.break_single_choice is set); zero-choice CGs yield the dummy value.
   */
  static <T> T registerChoiceGenerator (MJIEnv env, SystemState ss, ThreadInfo ti, ChoiceGenerator<T> cg, T dummyVal){

    int n = cg.getTotalNumberOfChoices();
    if (n == 0) {
      // nothing, just return the default value

    } else if (n == 1 && !breakSingleChoice) {
      // no choice -> no CG optimization
      cg.advance();
      return cg.getNextChoice();

    } else {
      if (ss.setNextChoiceGenerator(cg)){
        env.repeatInvocation();
      }
    }

    return dummyVal;
  }

  // "bottom half": on re-execution, fetch the current choice of the registered CG
  static <T,C extends ChoiceGenerator<T>> T getNextChoice (SystemState ss, String id, Class<C> cgClass, Class<T> choiceClass){
    ChoiceGenerator<?> cg = ss.getCurrentChoiceGenerator(id, cgClass);

    assert (cg != null) : "no ChoiceGenerator of type " + cgClass.getName();
    return ((ChoiceGenerator<T>)cg).getNextChoice();
  }

  /** Nondeterministic boolean choice (default BooleanChoiceGenerator order). */
  @MJI
  public static boolean getBoolean____Z (MJIEnv env, int clsObjRef) {
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();
    ChoiceGenerator<?> cg;

    if (!ti.isFirstStepInsn()) { // first time around
      cg = new BooleanChoiceGenerator(config, "verifyGetBoolean");
      if (ss.setNextChoiceGenerator(cg)){
        env.repeatInvocation();
      }
      return true;  // not used if we repeat

    } else {  // this is what really returns results
      return getNextChoice(ss,"verifyGetBoolean", BooleanChoiceGenerator.class,Boolean.class);
    }
  }

  /** Nondeterministic boolean choice with explicit enumeration order. */
  @MJI
  public static boolean getBoolean__Z__Z (MJIEnv env, int clsObjRef, boolean falseFirst) {
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();
    ChoiceGenerator<?> cg;

    if (!ti.isFirstStepInsn()) { // first time around
      cg = new BooleanChoiceGenerator( "verifyGetBoolean(Z)", falseFirst );
      if (ss.setNextChoiceGenerator(cg)){
        env.repeatInvocation();
      }
      return true;  // not used if we repeat

    } else {  // this is what really returns results
      return getNextChoice(ss,"verifyGetBoolean(Z)", BooleanChoiceGenerator.class, Boolean.class);
    }
  }

  /** Nondeterministic int choice from [min,max]; bounds are swapped if reversed. */
  @MJI
  public static int getInt__II__I (MJIEnv env, int clsObjRef, int min, int max) {
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();

    if (!ti.isFirstStepInsn()) { // first time around
      if (min > max){ // normalize the interval
        int t = max;
        max = min;
        min = t;
      }

      IntChoiceGenerator cg = new IntIntervalGenerator( "verifyGetInt(II)", min,max);
      return registerChoiceGenerator(env,ss,ti,cg,0);

    } else {
      return getNextChoice(ss, "verifyGetInt(II)", IntChoiceGenerator.class, Integer.class);
    }
  }

  // shared implementation for the int-array based choice
  static int getIntFromList (MJIEnv env, int[] values){
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();

    if (!ti.isFirstStepInsn()) { // first time around
      ChoiceGenerator<Integer> cg = new IntChoiceFromSet( "verifyGetIntSet([I)", values);
      return registerChoiceGenerator(env,ss,ti,cg,0);

    } else {
      return getNextChoice(ss, "verifyGetIntSet([I)", IntChoiceGenerator.class, Integer.class);
    }
  }

  /** Nondeterministic int choice from an explicit value set. */
  @MJI
  public static int getIntFromList___3I__I (MJIEnv env, int clsObjRef, int valArrayRef){
    int[] values = env.getIntArrayObject(valArrayRef);
    return getIntFromList( env, values);
  }

  /** Int choice using a config-declared CG registered under the given id. */
  @MJI
  public static int getInt__Ljava_lang_String_2__I (MJIEnv env, int clsObjRef, int idRef) {
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();

    if (!ti.isFirstStepInsn()) { // first time around
      String id = env.getStringObject(idRef);
      IntChoiceGenerator cg = createChoiceGenerator( IntChoiceGenerator.class, ss, id);
      return registerChoiceGenerator(env,ss,ti,cg, 0);

    } else {
      String id = env.getStringObject(idRef);
      return getNextChoice(ss, id, IntChoiceGenerator.class,Integer.class);
    }
  }

  // shared implementation for the long-array based choice
  static long getLongFromList (MJIEnv env, long[] values){
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();

    if (!ti.isFirstStepInsn()) { // first time around
      ChoiceGenerator<Long> cg = new LongChoiceFromList( "verifyLongList([J)", values);
      return registerChoiceGenerator(env,ss,ti,cg,0L);

    } else {
      return getNextChoice(ss, "verifyLongList([J)", LongChoiceGenerator.class, Long.class);
    }
  }

  /** Nondeterministic long choice from an explicit value set. */
  @MJI
  public static long getLongFromList___3J__J (MJIEnv env, int clsObjRef, int valArrayRef){
    long[] values = env.getLongArrayObject(valArrayRef);
    return getLongFromList( env, values);
  }

  /** Object-reference choice using a config-declared CG for the given id. */
  @MJI
  public static int getObject__Ljava_lang_String_2__Ljava_lang_Object_2 (MJIEnv env, int clsObjRef, int idRef) {
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();

    if (!ti.isFirstStepInsn()) { // first time around
      String id = env.getStringObject(idRef);
      ReferenceChoiceGenerator cg = createChoiceGenerator( ReferenceChoiceGenerator.class, ss, id);
      return registerChoiceGenerator(env,ss,ti,cg, MJIEnv.NULL);

    } else {
      String id = env.getStringObject(idRef);
      return getNextChoice(ss, id, ReferenceChoiceGenerator.class,Integer.class);
    }
  }

  /** Double choice using a config-declared CG for the given id. */
  @MJI
  public static double getDouble__Ljava_lang_String_2__D (MJIEnv env, int clsObjRef, int idRef) {
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();

    if (!ti.isFirstStepInsn()) { // first time around
      String id = env.getStringObject(idRef);
      DoubleChoiceGenerator cg = createChoiceGenerator( DoubleChoiceGenerator.class, ss, id);
      return registerChoiceGenerator(env,ss,ti,cg, 0.0);

    } else {
      String id = env.getStringObject(idRef);
      return getNextChoice(ss, id, DoubleChoiceGenerator.class,Double.class);
    }
  }

  // NOTE(review): unlike getIntFromList/getLongFromList, this helper carries @MJI
  // although it is not name-mangled - confirm whether the annotation is intended.
  @MJI
  public static double getDoubleFromList (MJIEnv env, double[] values){
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();

    if (!ti.isFirstStepInsn()) { // first time around
      ChoiceGenerator<Double> cg = new DoubleChoiceFromList("verifyDoubleList([D)", values);
      return registerChoiceGenerator(env,ss,ti,cg, 0.0);

    } else {
      return getNextChoice(ss, "verifyDoubleList([D)", DoubleChoiceFromList.class,Double.class);
    }
  }

  /** Nondeterministic double choice from an explicit value set. */
  @MJI
  public static double getDoubleFromList___3D__D (MJIEnv env, int clsObjRef, int valArrayRef){
    double[] values = env.getDoubleArrayObject(valArrayRef);
    return getDoubleFromList( env, values);
  }

  // see note on getDoubleFromList regarding the @MJI annotation
  @MJI
  public static float getFloatFromList (MJIEnv env, float[] values){
    ThreadInfo ti = env.getThreadInfo();
    SystemState ss = env.getSystemState();

    if (!ti.isFirstStepInsn()) { // first time around
      ChoiceGenerator<Float> cg = new FloatChoiceFromList("verifyFloatList([F)", values);
      return registerChoiceGenerator(env,ss,ti,cg, 0.0f);

    } else {
      return getNextChoice(ss, "verifyFloatList([F)", FloatChoiceFromList.class, Float.class);
    }
  }

  /** Nondeterministic float choice from an explicit value set. */
  @MJI
  public static float getFloatFromList___3F__F (MJIEnv env, int clsObjRef, int valArrayRef){
    float[] values = env.getFloatArrayObject(valArrayRef);
    return getFloatFromList( env, values);
  }

  /** Prints the current thread name immediately followed by the string. */
  @MJI
  public static void threadPrint__Ljava_lang_String_2__V (MJIEnv env, int clsRef, int sRef){
    String s = env.getStringObject(sRef);
    ThreadInfo ti = env.getThreadInfo();

    System.out.print(ti.getName());
    System.out.print(s);
  }
@MJI public static void print__Ljava_lang_String_2I__V (MJIEnv env, int clsRef, int sRef, int val){ String s = env.getStringObject(sRef); System.out.print(s + " : " + val); } @MJI public static void print__Ljava_lang_String_2Z__V (MJIEnv env, int clsRef, int sRef, boolean val){ String s = env.getStringObject(sRef); System.out.print(s + " : " + val); } @MJI public static void print___3Ljava_lang_String_2__V (MJIEnv env, int clsRef, int argsRef){ int n = env.getArrayLength(argsRef); for (int i=0; i<n; i++){ int aref = env.getReferenceArrayElement(argsRef, i); String s = env.getStringObject(aref); System.out.print(s); } } @MJI public static void print__Ljava_lang_String_2__V (MJIEnv env, int clsRef, int sRef){ String s = env.getStringObject(sRef); System.out.print(s); } @MJI public static void println__Ljava_lang_String_2__V (MJIEnv env, int clsRef, int sRef){ String s = env.getStringObject(sRef); System.out.println(s); } @MJI public static void threadPrintln__Ljava_lang_String_2__V (MJIEnv env, int clsRef, int sRef){ threadPrint__Ljava_lang_String_2__V(env, clsRef, sRef); System.out.println(); } @MJI public static void println____V (MJIEnv env, int clsRef){ System.out.println(); } //--- various attribute test methods private static int getAttribute (MJIEnv env, Object a){ if (a != null) { if (a instanceof Integer) { return ((Integer) a).intValue(); } else { env.throwException("java.lang.RuntimeException", "element attribute not an Integer: " + a); } } return 0; } private static int getAttributeList (MJIEnv env, Object a){ if (a != null) { int l = ObjectList.size(a); int[] attrs = new int[l]; int i = 0; for (Integer v : ObjectList.typedIterator(a, Integer.class)) { attrs[i++] = v; } if (i != l) { env.throwException("java.lang.RuntimeException", "found non-Integer attributes"); return 0; } return env.newIntArray(attrs); } else { return MJIEnv.NULL; } } @MJI public static void setObjectAttribute__Ljava_lang_Object_2I__V (MJIEnv env, int clsRef, int oRef, int attr){ if 
(oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); ei.setObjectAttr(Integer.valueOf(attr)); } } @MJI public static int getObjectAttribute__Ljava_lang_Object_2__I (MJIEnv env, int clsRef, int oRef){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); return getAttribute( env, ei.getObjectAttr()); } return 0; } @MJI public static void addObjectAttribute__Ljava_lang_Object_2I__V (MJIEnv env, int clsRef, int oRef, int attr){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); ei.addObjectAttr(Integer.valueOf(attr)); } } @MJI public static int getObjectAttributes__Ljava_lang_Object_2___3I (MJIEnv env, int clsRef, int oRef){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); return getAttributeList( env, ei.getObjectAttr()); } return MJIEnv.NULL; } @MJI public static void setFieldAttribute__Ljava_lang_Object_2Ljava_lang_String_2I__V (MJIEnv env, int clsRef, int oRef, int fnRef, int attr){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); if (ei != null){ String fname = env.getStringObject(fnRef); FieldInfo fi = ei.getFieldInfo(fname); if (fi != null) { ei.setFieldAttr(fi, Integer.valueOf(attr)); } else { env.throwException("java.lang.NoSuchFieldException", ei.getClassInfo().getName() + '.' + fname); } } else { env.throwException("java.lang.RuntimeException", "illegal reference value: " + oRef); } } } @MJI public static int getFieldAttribute__Ljava_lang_Object_2Ljava_lang_String_2__I (MJIEnv env, int clsRef, int oRef, int fnRef){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); if (ei != null){ String fname = env.getStringObject(fnRef); FieldInfo fi = ei.getFieldInfo(fname); if (fi != null) { return getAttribute( env, ei.getFieldAttr(fi)); } else { env.throwException("java.lang.NoSuchFieldException", ei.toString() + '.' 
+ fname); } } else { env.throwException("java.lang.RuntimeException", "illegal reference value: " + oRef); } } return 0; } @MJI public static void addFieldAttribute__Ljava_lang_Object_2Ljava_lang_String_2I__V (MJIEnv env, int clsRef, int oRef, int fnRef, int attr){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); if (ei != null){ String fname = env.getStringObject(fnRef); FieldInfo fi = ei.getFieldInfo(fname); if (fi != null) { ei.addFieldAttr(fi, Integer.valueOf(attr)); } else { env.throwException("java.lang.NoSuchFieldException", ei.getClassInfo().getName() + '.' + fname); } } else { env.throwException("java.lang.RuntimeException", "illegal reference value: " + oRef); } } } @MJI public static int getFieldAttributes__Ljava_lang_Object_2Ljava_lang_String_2___3I (MJIEnv env, int clsRef, int oRef, int fnRef){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); if (ei != null){ String fname = env.getStringObject(fnRef); FieldInfo fi = ei.getFieldInfo(fname); if (fi != null) { return getAttributeList( env, ei.getFieldAttr(fi)); } else { env.throwException("java.lang.NoSuchFieldException", ei.toString() + '.' 
+ fname); } } else { env.throwException("java.lang.RuntimeException", "illegal reference value: " + oRef); } } return MJIEnv.NULL; } @MJI public static void setLocalAttribute__Ljava_lang_String_2I__V (MJIEnv env, int clsRef, int varRef, int attr) { String slotName = env.getStringObject(varRef); StackFrame frame = env.getModifiableCallerStackFrame(); // we are executing in a NativeStackFrame if (!frame.getMethodInfo().isStatic() && slotName.equals("this")) { frame.setLocalAttr(0, Integer.valueOf(attr)); // only for instance methods of course } else { int slotIdx = frame.getLocalVariableSlotIndex(slotName); if (slotIdx >= 0) { frame.setLocalAttr(slotIdx, Integer.valueOf(attr)); } else { env.throwException("java.lang.RuntimeException", "local variable not found: " + slotName); } } } @MJI public static int getLocalAttribute__Ljava_lang_String_2__I (MJIEnv env, int clsRef, int varRef) { String slotName = env.getStringObject(varRef); ThreadInfo ti = env.getThreadInfo(); StackFrame frame = env.getCallerStackFrame(); int slotIdx = frame.getLocalVariableSlotIndex(slotName); if (slotIdx >= 0) { return getAttribute( env, frame.getLocalAttr(slotIdx)); } else { env.throwException("java.lang.RuntimeException", "local variable not found: " + slotName); return 0; } } @MJI public static void addLocalAttribute__Ljava_lang_String_2I__V (MJIEnv env, int clsRef, int varRef, int attr) { String slotName = env.getStringObject(varRef); StackFrame frame = env.getModifiableCallerStackFrame(); // we are executing in a NativeStackFrame if (!frame.getMethodInfo().isStatic() && slotName.equals("this")) { frame.addLocalAttr(0, Integer.valueOf(attr)); // only for instance methods of course } else { int slotIdx = frame.getLocalVariableSlotIndex(slotName); if (slotIdx >= 0) { frame.addLocalAttr(slotIdx, Integer.valueOf(attr)); } else { env.throwException("java.lang.RuntimeException", "local variable not found: " + slotName); } } } @MJI public static int getLocalAttributes__Ljava_lang_String_2___3I 
(MJIEnv env, int clsRef, int varRef) { String slotName = env.getStringObject(varRef); ThreadInfo ti = env.getThreadInfo(); StackFrame frame = env.getCallerStackFrame(); int slotIdx = frame.getLocalVariableSlotIndex(slotName); if (slotIdx >= 0) { return getAttributeList( env, frame.getLocalAttr(slotIdx)); } else { env.throwException("java.lang.RuntimeException", "local variable not found: " + slotName); } return MJIEnv.NULL; } @MJI public static void setElementAttribute__Ljava_lang_Object_2II__V (MJIEnv env, int clsRef, int oRef, int idx, int attr){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); if (ei != null){ if (ei.isArray()) { if (idx < ei.arrayLength()) { ei.setElementAttr(idx, Integer.valueOf(attr)); } else { env.throwException("java.lang.ArrayIndexOutOfBoundsException", Integer.toString(idx)); } } else { env.throwException("java.lang.RuntimeException", "not an array: " + ei); } } else { env.throwException("java.lang.RuntimeException", "illegal reference value: " + oRef); } } } @MJI public static int getElementAttribute__Ljava_lang_Object_2I__I (MJIEnv env, int clsRef, int oRef, int idx){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); if (ei != null) { if (ei.isArray()) { if (idx < ei.arrayLength()) { return getAttribute( env, ei.getElementAttr( idx)); } else { env.throwException("java.lang.ArrayIndexOutOfBoundsException", Integer.toString(idx)); } } else { env.throwException("java.lang.RuntimeException", "not an array: " + ei); } } else { env.throwException("java.lang.RuntimeException", "illegal reference value: " + oRef); } } return 0; } @MJI public static void addElementAttribute__Ljava_lang_Object_2II__V (MJIEnv env, int clsRef, int oRef, int idx, int attr){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); if (ei != null){ if (ei.isArray()) { if (idx < ei.arrayLength()) { ei.addElementAttr(idx, Integer.valueOf(attr)); } else { env.throwException("java.lang.ArrayIndexOutOfBoundsException", 
Integer.toString(idx)); } } else { env.throwException("java.lang.RuntimeException", "not an array: " + ei); } } else { env.throwException("java.lang.RuntimeException", "illegal reference value: " + oRef); } } } @MJI public static int getElementAttributes__Ljava_lang_Object_2I___3I (MJIEnv env, int clsRef, int oRef, int idx){ if (oRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(oRef); if (ei != null) { if (ei.isArray()) { if (idx < ei.arrayLength()) { return getAttributeList( env, ei.getElementAttr( idx)); } else { env.throwException("java.lang.ArrayIndexOutOfBoundsException", Integer.toString(idx)); } } else { env.throwException("java.lang.RuntimeException", "not an array: " + ei); } } else { env.throwException("java.lang.RuntimeException", "illegal reference value: " + oRef); } } return MJIEnv.NULL; } /** * deprecated, use getBoolean() */ @MJI public static boolean randomBool (MJIEnv env, int clsObjRef) { //SystemState ss = env.getSystemState(); //return (ss.random(2) != 0); return getBoolean____Z(env, clsObjRef); } /** * deprecated, use getInt */ @MJI public static int random__I__I (MJIEnv env, int clsObjRef, int x) { return getInt__II__I(env, clsObjRef, 0, x); } static void boring__Z__V (MJIEnv env, int clsObjRef, boolean b) { env.getSystemState().setBoring(b); } @MJI public static boolean isRunningInJPF____Z(MJIEnv env, int clsObjRef) { return true; } @MJI public static boolean vmIsMatchingStates____Z(MJIEnv env, int clsObjRef) { return env.getVM().getStateSet() != null; } @MJI public static void storeTrace__Ljava_lang_String_2Ljava_lang_String_2__V (MJIEnv env, int clsObjRef, int filenameRef, int commentRef) { String fileName = env.getStringObject(filenameRef); String comment = env.getStringObject(commentRef); env.getVM().storeTrace(fileName, comment, config.getBoolean("trace.verbose", false)); } @MJI public static void terminateSearch____V (MJIEnv env, int clsObjRef) { JPF jpf = env.getVM().getJPF(); jpf.getSearch().terminate(); } @MJI public static 
void setHeuristicSearchValue__I__V (MJIEnv env, int clsObjRef, int val){ heuristicSearchValue = val; } @MJI public static int getHeuristicSearchValue____I (MJIEnv env, int clsObjRef){ return heuristicSearchValue; } @MJI public static void resetHeuristicSearchValue____V (MJIEnv env, int clsObjRef){ heuristicSearchValue = config.getInt("search.heuristic.default_value"); } @MJI public static boolean isTraceReplay____Z (MJIEnv env, int clsObjRef) { return env.getVM().isTraceReplay(); } @MJI public static boolean isShared__Ljava_lang_Object_2__Z (MJIEnv env, int clsObjRef, int objRef){ if (objRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(objRef); if (ei != null){ return ei.isShared(); } } return false; } @MJI public static void setShared__Ljava_lang_Object_2Z__V (MJIEnv env, int clsObjRef, int objRef, boolean isShared) { if (objRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(objRef); if (ei != null){ if (ei.getClassInfo() == ClassLoaderInfo.getCurrentSystemClassLoader().getClassClassInfo()) { // it's a class object, set static fields shared ei = env.getStaticElementInfo(objRef); } if (ei.isShared() != isShared) { ei = ei.getModifiableInstance(); ei.setShared( env.getThreadInfo(), isShared); } } } } @MJI public static void freezeSharedness__Ljava_lang_Object_2Z__V (MJIEnv env, int clsObjRef, int objRef, boolean freeze) { if (objRef != MJIEnv.NULL){ ElementInfo ei = env.getElementInfo(objRef); if (ei != null) { if (ei.getClassInfo() == ClassLoaderInfo.getCurrentSystemClassLoader().getClassClassInfo()) { // it's a class object, freeze sharedness of static fields ei = env.getStaticElementInfo(objRef); } if (ei.isSharednessFrozen() != freeze) { ei = ei.getModifiableInstance(); ei.freezeSharedness(freeze); } } } } @MJI public static void setProperties___3Ljava_lang_String_2__V (MJIEnv env, int clsObjRef, int argRef) { if (argRef != MJIEnv.NULL) { Config conf = env.getConfig(); int n = env.getArrayLength(argRef); for (int i=0; i<n; i++) { int pRef = 
env.getReferenceArrayElement(argRef, i); if (pRef != MJIEnv.NULL) { String p = env.getStringObject(pRef); config.parse(p); } } } } @MJI public static int getProperty__Ljava_lang_String_2__Ljava_lang_String_2 (MJIEnv env, int clsObjRef, int keyRef) { if (keyRef != MJIEnv.NULL){ Config conf = env.getConfig(); String key = env.getStringObject(keyRef); String val = config.getString(key); if (val != null){ return env.newString(val); } else { return MJIEnv.NULL; } } else { return MJIEnv.NULL; } } @MJI public static void printPathOutput__ZLjava_lang_String_2__V (MJIEnv env, int clsObjRef, boolean cond, int msgRef){ if (cond){ printPathOutput__Ljava_lang_String_2__V(env,clsObjRef,msgRef); } } @MJI public static void printPathOutput__Ljava_lang_String_2__V (MJIEnv env, int clsObjRef, int msgRef){ VM vm = env.getVM(); System.out.println(); if (msgRef != MJIEnv.NULL){ String msg = env.getStringObject(msgRef); System.out.println("~~~~~~~~~~~~~~~~~~~~~~~ begin program output at: " + msg); } else { System.out.println("~~~~~~~~~~~~~~~~~~~~~~~ begin path output"); } for (Transition t : vm.getPath()) { String s = t.getOutput(); if (s != null) { System.out.print(s); } } // we might be in the middle of a transition that isn't stored yet in the path String s = vm.getPendingOutput(); if (s != null) { System.out.print(s); } System.out.println("~~~~~~~~~~~~~~~~~~~~~~~ end path output"); } // the JSON object initialization @MJI public static int createFromJSON__Ljava_lang_Class_2Ljava_lang_String_2__Ljava_lang_Object_2( MJIEnv env, int clsObjRef, int newObjClsRef, int jsonStringRef) { ThreadInfo ti = env.getThreadInfo(); SystemState ss = env.getSystemState(); String jsonString = env.getStringObject(jsonStringRef); JSONLexer lexer = new JSONLexer(jsonString); JSONParser parser = new JSONParser(lexer); JSONObject jsonObject = parser.parse(); if (jsonObject != null) { ClassInfo ci = env.getReferredClassInfo( newObjClsRef); // check if we need any class init (and hence reexecution) before 
creating any CGs if (jsonObject.requiresClinitExecution(ci,ti)){ env.repeatInvocation(); return MJIEnv.NULL; } if (!ti.isFirstStepInsn()) { // Top half - get and register CGs we need to set to fill object from JSON List<ChoiceGenerator<?>> cgList = CGCall.createCGList(jsonObject); if (cgList.isEmpty()){ return jsonObject.fillObject(env, ci, null, ""); } else { for (ChoiceGenerator<?> cg : cgList) { ss.setNextChoiceGenerator(cg); } env.repeatInvocation(); return MJIEnv.NULL; } } else { // Bottom half - fill object with JSON and current values of CGs ChoiceGenerator<?>[] cgs = ss.getChoiceGenerators(); return jsonObject.fillObject(env, ci, cgs, ""); } } else { return MJIEnv.NULL; } } @MJI public static int readObjectFromFile__Ljava_lang_Class_2Ljava_lang_String_2__Ljava_lang_Object_2( MJIEnv env, int clsObjRef, int newObjClsRef, int fileNameRef) { int typeNameRef = env.getReferenceField(newObjClsRef, "name"); String typeName = env.getStringObject(typeNameRef); String fileName = env.getStringObject(fileNameRef); try { FileInputStream fis = new FileInputStream(fileName); ObjectInputStream ois = new ObjectInputStream(fis); Object javaObject = ois.readObject(); String readObjectTypeName = javaObject.getClass().getCanonicalName(); int readObjRef = ObjectConverter.JPFObjectFromJavaObject(env, javaObject); return readObjRef; } catch (ClinitRequired clix){ env.repeatInvocation(); return MJIEnv.NULL; } catch (IOException iox){ throw new JPFException("failure reading object from file: " + fileName, iox); } catch (ClassNotFoundException cnfx){ throw new JPFException("failure reading object from file: " + fileName, cnfx); } } //--- those need to be kept in sync with the model side public static final int SEVERE = 1; public static final int WARNING = 2; public static final int INFO = 3; public static final int FINE = 4; public static final int FINER = 5; public static final int FINEST = 6; private static void log (JPFLogger logger, int logLevel, String msg){ switch (logLevel){ 
case SEVERE: logger.severe( msg); break; case WARNING: logger.warning( msg); break; case INFO: logger.info( msg); break; case FINE: logger.fine( msg); break; case FINER: logger.finer( msg); break; case FINEST: logger.finest( msg); break; default: throw new JPFException("unknown log level " + logLevel + " for logger " + logger.getName()); } } @MJI public static void log__Ljava_lang_String_2ILjava_lang_String_2__V (MJIEnv env, int clsObjRef, int loggerIdRef, int logLevel, int msgRef){ String loggerId = env.getStringObject(loggerIdRef); String msg = env.getStringObject(msgRef); JPFLogger logger = JPF.getLogger(loggerId); log( logger, logLevel, msg); } @MJI public static void log__Ljava_lang_String_2ILjava_lang_String_2Ljava_lang_String_2__V (MJIEnv env, int clsObjRef, int loggerIdRef, int logLevel, int arg1Ref, int arg2Ref){ String loggerId = env.getStringObject(loggerIdRef); String msg = env.getStringObject(arg1Ref) + env.getStringObject(arg2Ref); JPFLogger logger = JPF.getLogger(loggerId); log( logger, logLevel, msg); } @MJI public static void log__Ljava_lang_String_2ILjava_lang_String_2_3Ljava_lang_Object_2__V (MJIEnv env, int clsObjRef, int loggerIdRef, int logLevel, int fmtRef, int argsRef){ String loggerId = env.getStringObject(loggerIdRef); String fmt = env.getStringObject(fmtRef); JPFLogger logger = JPF.getLogger(loggerId); int[] argRefs = env.getReferenceArrayObject( argsRef); Object[] args = new Object[argRefs.length]; for (int i=0; i<args.length; i++){ ElementInfo eiArg = env.getElementInfo(argRefs[i]); if (eiArg.isStringObject()){ args[i] = env.getStringObject(argRefs[i]); } else if (eiArg.isBoxObject()){ args[i] = eiArg.asBoxObject(); } else { args[i] = eiArg.toString(); } } String msg = String.format(fmt, args); log( logger, logLevel, msg); } }
/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=======================================================================*/

// This class has been generated, DO NOT EDIT!

package org.tensorflow.op.core;

import java.util.Arrays;
import org.tensorflow.GraphOperation;
import org.tensorflow.Operand;
import org.tensorflow.Operation;
import org.tensorflow.OperationBuilder;
import org.tensorflow.Output;
import org.tensorflow.op.RawOp;
import org.tensorflow.op.RawOpInputs;
import org.tensorflow.op.Scope;
import org.tensorflow.op.annotation.Endpoint;
import org.tensorflow.op.annotation.OpInputsMetadata;
import org.tensorflow.op.annotation.OpMetadata;
import org.tensorflow.op.annotation.Operator;
import org.tensorflow.proto.framework.DataType;
import org.tensorflow.types.family.TNumber;
import org.tensorflow.types.family.TType;

/**
 * Applies sparse addition to individual values or slices in a Variable.
 * {@code ref} is a {@code Tensor} with rank {@code P} and {@code indices} is a {@code Tensor} of rank {@code Q}.
 * <p>{@code indices} must be integer tensor, containing indices into {@code ref}.
 * It must be shape {@code [d_0, ..., d_{Q-2}, K]} where {@code 0 < K <= P}.
 * <p>The innermost dimension of {@code indices} (with length {@code K}) corresponds to
 * indices into elements (if {@code K = P}) or slices (if {@code K < P}) along the {@code K}th
 * dimension of {@code ref}.
 * <p>{@code updates} is {@code Tensor} of rank {@code Q-1+P-K} with shape:
 * <pre>
 * [d_0, ..., d_{Q-2}, ref.shape[K], ..., ref.shape[P-1]]
 * </pre>
 * <p>For example, say we want to add 4 scattered elements to a rank-1 tensor to
 * 8 elements. In Python, that addition would look like this:
 * <pre>
 * ref = tf.Variable([1, 2, 3, 4, 5, 6, 7, 8])
 * indices = tf.constant([[4], [3], [1], [7]])
 * updates = tf.constant([9, 10, 11, 12])
 * add = tf.scatter_nd_add(ref, indices, updates)
 * with tf.Session() as sess:
 * print sess.run(add)
 * </pre>
 * <p>The resulting update to ref would look like this:
 * <pre>
 * [1, 13, 3, 14, 14, 6, 7, 20]
 * </pre>
 * <p>See {@code tf.scatter_nd} for more details about how to make updates to
 * slices.
 *
 * @param <T> data type for {@code output_ref} output
 */
@OpMetadata(
    opType = ScatterNdAdd.OP_NAME,
    inputsClass = ScatterNdAdd.Inputs.class
)
@Operator
public final class ScatterNdAdd<T extends TType> extends RawOp implements Operand<T> {
  /**
   * The name of this op, as known by TensorFlow core engine
   */
  public static final String OP_NAME = "ScatterNdAdd";

  private Output<T> outputRef;

  // wraps an already-built graph operation; outputs are picked up by index
  public ScatterNdAdd(Operation operation) {
    super(operation, OP_NAME);
    int outputIdx = 0;
    outputRef = operation.output(outputIdx++);
  }

  /**
   * Factory method to create a class wrapping a new ScatterNdAdd operation.
   *
   * @param scope current scope
   * @param ref A mutable Tensor. Should be from a Variable node.
   * @param indices A Tensor. Must be one of the following types: int32, int64.
   * A tensor of indices into ref.
   * @param updates A Tensor. Must have the same type as ref. A tensor of updated values
   * to add to ref.
   * @param options carries optional attribute values
   * @param <T> data type for {@code ScatterNdAdd} output and operands
   * @return a new instance of ScatterNdAdd
   */
  @Endpoint(
      describeByClass = true
  )
  public static <T extends TType> ScatterNdAdd<T> create(Scope scope, Operand<T> ref,
      Operand<? extends TNumber> indices, Operand<T> updates, Options... options) {
    OperationBuilder opBuilder = scope.opBuilder(OP_NAME, "ScatterNdAdd");
    opBuilder.addInput(ref.asOutput());
    opBuilder.addInput(indices.asOutput());
    opBuilder.addInput(updates.asOutput());
    if (options != null) {
      for (Options opts : options) {
        if (opts.useLocking != null) {
          opBuilder.setAttr("use_locking", opts.useLocking);
        }
      }
    }
    return new ScatterNdAdd<>(opBuilder.build());
  }

  /**
   * Sets the useLocking option.
   *
   * @param useLocking An optional bool. Defaults to True. If True, the assignment will
   * be protected by a lock; otherwise the behavior is undefined,
   * but may exhibit less contention.
   * @return this Options instance.
   */
  public static Options useLocking(Boolean useLocking) {
    return new Options().useLocking(useLocking);
  }

  /**
   * Gets outputRef.
   * Same as ref. Returned as a convenience for operations that want
   * to use the updated values after the update is done.
   * @return outputRef.
   */
  public Output<T> outputRef() {
    return outputRef;
  }

  @Override
  public Output<T> asOutput() {
    return outputRef;
  }

  /**
   * Optional attributes for {@link org.tensorflow.op.core.ScatterNdAdd}
   */
  public static class Options {
    private Boolean useLocking;

    private Options() {
    }

    /**
     * Sets the useLocking option.
     *
     * @param useLocking An optional bool. Defaults to True. If True, the assignment will
     * be protected by a lock; otherwise the behavior is undefined,
     * but may exhibit less contention.
     * @return this Options instance.
     */
    public Options useLocking(Boolean useLocking) {
      this.useLocking = useLocking;
      return this;
    }
  }

  @OpInputsMetadata(
      outputsClass = ScatterNdAdd.class
  )
  public static class Inputs<T extends TType> extends RawOpInputs<ScatterNdAdd<T>> {
    /**
     * A mutable Tensor. Should be from a Variable node.
     */
    public final Operand<T> ref;

    /**
     * A Tensor. Must be one of the following types: int32, int64.
     * A tensor of indices into ref.
     */
    public final Operand<? extends TNumber> indices;

    /**
     * A Tensor. Must have the same type as ref. A tensor of updated values
     * to add to ref.
     */
    public final Operand<T> updates;

    /**
     * The T attribute
     */
    public final DataType T;

    /**
     * The Tindices attribute
     */
    public final DataType Tindices;

    /**
     * An optional bool. Defaults to True. If True, the assignment will
     * be protected by a lock; otherwise the behavior is undefined,
     * but may exhibit less contention.
     */
    public final boolean useLocking;

    // the unchecked casts below are part of the generated wrapper pattern
    public Inputs(GraphOperation op) {
      super(new ScatterNdAdd<>(op), op, Arrays.asList("T", "Tindices", "use_locking"));
      int inputIndex = 0;
      ref = (Operand<T>) op.input(inputIndex++);
      indices = (Operand<? extends TNumber>) op.input(inputIndex++);
      updates = (Operand<T>) op.input(inputIndex++);
      T = op.attributes().getAttrType("T");
      Tindices = op.attributes().getAttrType("Tindices");
      useLocking = op.attributes().getAttrBool("use_locking");
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.core;

import org.apache.lucene.index.FieldInfo.IndexOptions;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper.Loading;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.*;
import static org.elasticsearch.index.mapper.FieldMapper.DOC_VALUES_FORMAT;

/**
 * Static helpers shared by the concrete field-mapper type parsers: they turn
 * the raw {@code Map<String, Object>} mapping nodes produced by XContent
 * parsing into configured mapper builders.
 */
public class TypeParsers {

    public static final String MULTI_FIELD_CONTENT_TYPE = "multi_field";

    /**
     * Parser for the legacy {@code multi_field} mapping type: resolves the
     * main field builder (the sub-field whose name matches the field name)
     * and attaches every other sub-field to it as a multi field.
     */
    public static final Mapper.TypeParser multiFieldConverterTypeParser = new Mapper.TypeParser() {

        @Override
        @SuppressWarnings("unchecked")
        public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            ContentPath.Type pathType = null;
            AbstractFieldMapper.Builder mainFieldBuilder = null;
            List<AbstractFieldMapper.Builder> fields = null;
            // Remembered so we can synthesize a main field of the same type
            // when none of the sub-fields matches the field name.
            String firstType = null;
            for (Map.Entry<String, Object> entry : node.entrySet()) {
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("path")) {
                    pathType = parsePathType(name, fieldNode.toString());
                } else if (fieldName.equals("fields")) {
                    Map<String, Object> fieldsNode = (Map<String, Object>) fieldNode;
                    for (Map.Entry<String, Object> entry1 : fieldsNode.entrySet()) {
                        String propName = entry1.getKey();
                        Map<String, Object> propNode = (Map<String, Object>) entry1.getValue();

                        String type;
                        Object typeNode = propNode.get("type");
                        if (typeNode != null) {
                            type = typeNode.toString();
                            if (firstType == null) {
                                firstType = type;
                            }
                        } else {
                            throw new MapperParsingException("No type specified for property [" + propName + "]");
                        }

                        Mapper.TypeParser typeParser = parserContext.typeParser(type);
                        if (typeParser == null) {
                            // Report the offending sub-field; 'fieldName' at this
                            // point is always the literal "fields" key and would
                            // produce a misleading message.
                            throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + propName + "]");
                        }
                        if (propName.equals(name)) {
                            mainFieldBuilder = (AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext);
                        } else {
                            if (fields == null) {
                                fields = new ArrayList<>(2);
                            }
                            fields.add((AbstractFieldMapper.Builder) typeParser.parse(propName, propNode, parserContext));
                        }
                    }
                }
            }
            if (mainFieldBuilder == null) {
                if (fields == null) {
                    // No fields at all were specified in multi_field, so lets return a non indexed string field.
                    return new StringFieldMapper.Builder(name).index(false);
                }
                Mapper.TypeParser typeParser = parserContext.typeParser(firstType);
                if (typeParser == null) {
                    // The first multi field's type is unknown
                    mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
                } else {
                    Mapper.Builder substitute = typeParser.parse(name, Collections.<String, Object>emptyMap(), parserContext);
                    if (substitute instanceof AbstractFieldMapper.Builder) {
                        mainFieldBuilder = ((AbstractFieldMapper.Builder) substitute).index(false);
                    } else {
                        // The first multi isn't a core field type
                        mainFieldBuilder = new StringFieldMapper.Builder(name).index(false);
                    }
                }
            }
            if (fields != null && pathType != null) {
                for (Mapper.Builder field : fields) {
                    mainFieldBuilder.addMultiField(field);
                }
                mainFieldBuilder.multiFieldPathType(pathType);
            } else if (fields != null) {
                for (Mapper.Builder field : fields) {
                    mainFieldBuilder.addMultiField(field);
                }
            } else if (pathType != null) {
                mainFieldBuilder.multiFieldPathType(pathType);
            }
            return mainFieldBuilder;
        }
    };

    public static final String DOC_VALUES = "doc_values";
    public static final String INDEX_OPTIONS_DOCS = "docs";
    public static final String INDEX_OPTIONS_FREQS = "freqs";
    public static final String INDEX_OPTIONS_POSITIONS = "positions";
    public static final String INDEX_OPTIONS_OFFSETS = "offsets";

    /**
     * Parses the numeric-specific mapping options ({@code precision_step},
     * {@code ignore_malformed}, {@code coerce}, ...) on top of the common
     * field options handled by {@link #parseField}.
     */
    public static void parseNumberField(NumberFieldMapper.Builder builder, String name, Map<String, Object> numberNode, Mapper.TypeParser.ParserContext parserContext) {
        parseField(builder, name, numberNode, parserContext);
        for (Map.Entry<String, Object> entry : numberNode.entrySet()) {
            String propName = Strings.toUnderscoreCase(entry.getKey());
            Object propNode = entry.getValue();
            if (propName.equals("precision_step")) {
                builder.precisionStep(nodeIntegerValue(propNode));
            } else if (propName.equals("ignore_malformed")) {
                builder.ignoreMalformed(nodeBooleanValue(propNode));
            } else if (propName.equals("coerce")) {
                builder.coerce(nodeBooleanValue(propNode));
            } else if (propName.equals("omit_norms")) {
                builder.omitNorms(nodeBooleanValue(propNode));
            } else if (propName.equals("similarity")) {
                builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
            } else {
                parseMultiField(builder, name, parserContext, propName, propNode);
            }
        }
    }

    /**
     * Parses the mapping options common to all field types (store, index,
     * term vectors, norms, analyzers, copy_to, ...) into {@code builder}.
     *
     * @throws MapperParsingException if a referenced analyzer does not exist
     */
    public static void parseField(AbstractFieldMapper.Builder builder, String name, Map<String, Object> fieldNode, Mapper.TypeParser.ParserContext parserContext) {
        for (Map.Entry<String, Object> entry : fieldNode.entrySet()) {
            final String propName = Strings.toUnderscoreCase(entry.getKey());
            final Object propNode = entry.getValue();
            if (propName.equals("index_name")) {
                builder.indexName(propNode.toString());
            } else if (propName.equals("store")) {
                builder.store(parseStore(name, propNode.toString()));
            } else if (propName.equals("index")) {
                parseIndex(name, propNode.toString(), builder);
            } else if (propName.equals("tokenized")) {
                builder.tokenized(nodeBooleanValue(propNode));
            } else if (propName.equals(DOC_VALUES)) {
                builder.docValues(nodeBooleanValue(propNode));
            } else if (propName.equals("term_vector")) {
                parseTermVector(name, propNode.toString(), builder);
            } else if (propName.equals("boost")) {
                builder.boost(nodeFloatValue(propNode));
            } else if (propName.equals("store_term_vectors")) {
                builder.storeTermVectors(nodeBooleanValue(propNode));
            } else if (propName.equals("store_term_vector_offsets")) {
                builder.storeTermVectorOffsets(nodeBooleanValue(propNode));
            } else if (propName.equals("store_term_vector_positions")) {
                builder.storeTermVectorPositions(nodeBooleanValue(propNode));
            } else if (propName.equals("store_term_vector_payloads")) {
                builder.storeTermVectorPayloads(nodeBooleanValue(propNode));
            } else if (propName.equals("omit_norms")) {
                builder.omitNorms(nodeBooleanValue(propNode));
            } else if (propName.equals("norms")) {
                // "norms" is an object node: { "enabled": bool, "loading": ... }
                final Map<String, Object> properties = nodeMapValue(propNode, "norms");
                for (Map.Entry<String, Object> entry2 : properties.entrySet()) {
                    final String propName2 = Strings.toUnderscoreCase(entry2.getKey());
                    final Object propNode2 = entry2.getValue();
                    if (propName2.equals("enabled")) {
                        builder.omitNorms(!nodeBooleanValue(propNode2));
                    } else if (propName2.equals(Loading.KEY)) {
                        builder.normsLoading(Loading.parse(nodeStringValue(propNode2, null), null));
                    }
                }
            } else if (propName.equals("omit_term_freq_and_positions")) {
                final IndexOptions op = nodeBooleanValue(propNode) ? IndexOptions.DOCS_ONLY : IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
                if (parserContext.indexVersionCreated().onOrAfter(Version.V_1_0_0_RC2)) {
                    throw new ElasticsearchParseException("'omit_term_freq_and_positions' is not supported anymore - use ['index_options' : '" + op.name() + "'] instead");
                }
                // deprecated option for BW compat
                builder.indexOptions(op);
            } else if (propName.equals("index_options")) {
                builder.indexOptions(nodeIndexOptionValue(propNode));
            } else if (propName.equals("analyzer")) {
                // Shorthand: a single "analyzer" sets both index and search analyzers.
                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
                if (analyzer == null) {
                    throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
                }
                builder.indexAnalyzer(analyzer);
                builder.searchAnalyzer(analyzer);
            } else if (propName.equals("index_analyzer")) {
                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
                if (analyzer == null) {
                    throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
                }
                builder.indexAnalyzer(analyzer);
            } else if (propName.equals("search_analyzer")) {
                NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
                if (analyzer == null) {
                    throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
                }
                builder.searchAnalyzer(analyzer);
            } else if (propName.equals("include_in_all")) {
                builder.includeInAll(nodeBooleanValue(propNode));
            } else if (propName.equals("postings_format")) {
                String postingFormatName = propNode.toString();
                builder.postingsFormat(parserContext.postingFormatService().get(postingFormatName));
            } else if (propName.equals(DOC_VALUES_FORMAT)) {
                String docValuesFormatName = propNode.toString();
                builder.docValuesFormat(parserContext.docValuesFormatService().get(docValuesFormatName));
            } else if (propName.equals("similarity")) {
                builder.similarity(parserContext.similarityLookupService().similarity(propNode.toString()));
            } else if (propName.equals("fielddata")) {
                final Settings settings = ImmutableSettings.builder().put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata"))).build();
                builder.fieldDataSettings(settings);
            } else if (propName.equals("copy_to")) {
                parseCopyFields(propNode, builder);
            }
        }
    }

    /**
     * Parses the modern {@code fields} (multi fields) syntax and the legacy
     * multi-field {@code path} option for a single mapping property.
     *
     * @throws MapperParsingException if a sub-field definition is malformed or
     *         references an unknown type
     */
    @SuppressWarnings("unchecked")
    public static void parseMultiField(AbstractFieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) {
        if (propName.equals("path")) {
            builder.multiFieldPathType(parsePathType(name, propNode.toString()));
        } else if (propName.equals("fields")) {

            final Map<String, Object> multiFieldsPropNodes;

            if (propNode instanceof List && ((List<?>) propNode).isEmpty()) {
                multiFieldsPropNodes = Collections.emptyMap();
            } else if (propNode instanceof Map) {
                multiFieldsPropNodes = (Map<String, Object>) propNode;
            } else {
                // Report the field name; printing the raw node object would dump
                // its toString() rather than identify the offending field.
                throw new MapperParsingException("Expected map for property [fields] on field [" + name + "] or " +
                        "[" + propName + "] but got a " + propNode.getClass());
            }

            for (Map.Entry<String, Object> multiFieldEntry : multiFieldsPropNodes.entrySet()) {
                String multiFieldName = multiFieldEntry.getKey();
                if (!(multiFieldEntry.getValue() instanceof Map)) {
                    throw new MapperParsingException("Illegal field [" + multiFieldName + "], only fields can be specified inside fields");
                }
                @SuppressWarnings("unchecked")
                Map<String, Object> multiFieldNodes = (Map<String, Object>) multiFieldEntry.getValue();

                String type;
                Object typeNode = multiFieldNodes.get("type");
                if (typeNode != null) {
                    type = typeNode.toString();
                } else {
                    throw new MapperParsingException("No type specified for property [" + multiFieldName + "]");
                }

                Mapper.TypeParser typeParser = parserContext.typeParser(type);
                if (typeParser == null) {
                    throw new MapperParsingException("No handler for type [" + type + "] declared on field [" + multiFieldName + "]");
                }
                builder.addMultiField(typeParser.parse(multiFieldName, multiFieldNodes, parserContext));
            }
        }
    }

    // Maps the textual "index_options" value onto Lucene's IndexOptions enum.
    private static IndexOptions nodeIndexOptionValue(final Object propNode) {
        final String value = propNode.toString();
        if (INDEX_OPTIONS_OFFSETS.equalsIgnoreCase(value)) {
            return IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS;
        } else if (INDEX_OPTIONS_POSITIONS.equalsIgnoreCase(value)) {
            return IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
        } else if (INDEX_OPTIONS_FREQS.equalsIgnoreCase(value)) {
            return IndexOptions.DOCS_AND_FREQS;
        } else if (INDEX_OPTIONS_DOCS.equalsIgnoreCase(value)) {
            return IndexOptions.DOCS_ONLY;
        } else {
            throw new ElasticsearchParseException("Failed to parse index option [" + value + "]");
        }
    }

    /** Builds a Joda-based date formatter from a mapping "format" node. */
    public static FormatDateTimeFormatter parseDateTimeFormatter(Object node) {
        return Joda.forPattern(node.toString());
    }

    /**
     * Translates the textual {@code term_vector} option into the individual
     * store-term-vector flags on the builder.
     *
     * @throws MapperParsingException on an unknown term vector value
     */
    public static void parseTermVector(String fieldName, String termVector, AbstractFieldMapper.Builder builder) throws MapperParsingException {
        termVector = Strings.toUnderscoreCase(termVector);
        if ("no".equals(termVector)) {
            builder.storeTermVectors(false);
        } else if ("yes".equals(termVector)) {
            builder.storeTermVectors(true);
        } else if ("with_offsets".equals(termVector)) {
            builder.storeTermVectorOffsets(true);
        } else if ("with_positions".equals(termVector)) {
            builder.storeTermVectorPositions(true);
        } else if ("with_positions_offsets".equals(termVector)) {
            builder.storeTermVectorPositions(true);
            builder.storeTermVectorOffsets(true);
        } else if ("with_positions_payloads".equals(termVector)) {
            builder.storeTermVectorPositions(true);
            builder.storeTermVectorPayloads(true);
        } else if ("with_positions_offsets_payloads".equals(termVector)) {
            builder.storeTermVectorPositions(true);
            builder.storeTermVectorOffsets(true);
            builder.storeTermVectorPayloads(true);
        } else {
            throw new MapperParsingException("Wrong value for termVector [" + termVector + "] for field [" + fieldName + "]");
        }
    }

    /**
     * Translates the textual {@code index} option ({@code no},
     * {@code not_analyzed}, {@code analyzed}) into index/tokenized flags.
     *
     * @throws MapperParsingException on an unknown index value
     */
    public static void parseIndex(String fieldName, String index, AbstractFieldMapper.Builder builder) throws MapperParsingException {
        index = Strings.toUnderscoreCase(index);
        if ("no".equals(index)) {
            builder.index(false);
        } else if ("not_analyzed".equals(index)) {
            builder.index(true);
            builder.tokenized(false);
        } else if ("analyzed".equals(index)) {
            builder.index(true);
            builder.tokenized(true);
        } else {
            throw new MapperParsingException("Wrong value for index [" + index + "] for field [" + fieldName + "]");
        }
    }

    /**
     * Parses the {@code store} option; accepts the legacy "yes"/"no" values
     * and falls back to generic boolean parsing for everything else.
     */
    public static boolean parseStore(String fieldName, String store) throws MapperParsingException {
        if ("no".equals(store)) {
            return false;
        } else if ("yes".equals(store)) {
            return true;
        } else {
            return nodeBooleanValue(store);
        }
    }

    /**
     * Parses the legacy multi-field {@code path} option.
     *
     * @throws MapperParsingException on an unknown path value
     */
    public static ContentPath.Type parsePathType(String name, String path) throws MapperParsingException {
        path = Strings.toUnderscoreCase(path);
        if ("just_name".equals(path)) {
            return ContentPath.Type.JUST_NAME;
        } else if ("full".equals(path)) {
            return ContentPath.Type.FULL;
        } else {
            throw new MapperParsingException("Wrong value for pathType [" + path + "] for object [" + name + "]");
        }
    }

    /**
     * Parses the {@code copy_to} option, which may be a single field name or
     * an array of field names.
     */
    @SuppressWarnings("unchecked")
    public static void parseCopyFields(Object propNode, AbstractFieldMapper.Builder builder) {
        AbstractFieldMapper.CopyTo.Builder copyToBuilder = new AbstractFieldMapper.CopyTo.Builder();
        if (isArray(propNode)) {
            for (Object node : (List<Object>) propNode) {
                copyToBuilder.add(nodeStringValue(node, null));
            }
        } else {
            copyToBuilder.add(nodeStringValue(propNode, null));
        }
        builder.copyTo(copyToBuilder.build());
    }

}
/*
 * Copyright (C) 2010-2011 Richard Lincoln
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to
 * deal in the Software without restriction, including without limitation the
 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
 * sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 */

package cern.colt.util.tdouble;

import java.util.concurrent.Future;

import cern.colt.list.tdouble.DoubleArrayList;
import cern.colt.list.tint.IntArrayList;
import cern.colt.matrix.tdcomplex.DComplexFactory1D;
import cern.colt.matrix.tdcomplex.DComplexFactory2D;
import cern.colt.matrix.tdcomplex.DComplexMatrix1D;
import cern.colt.matrix.tdcomplex.DComplexMatrix2D;
import cern.colt.matrix.tdouble.DoubleFactory1D;
import cern.colt.matrix.tdouble.DoubleMatrix1D;
import cern.colt.matrix.tdouble.DoubleMatrix2D;
import cern.colt.matrix.tint.IntFactory1D;
import cern.colt.matrix.tint.IntMatrix1D;
import cern.jet.math.tdouble.DoubleFunctions;
import cern.jet.math.tint.IntFunctions;
import edu.emory.mathcs.utils.ConcurrencyUtils;

/**
 * Assorted array/matrix helpers (ranges, concatenation, nonzero scans,
 * polar/complex construction, linear indexing) built on Parallel Colt.
 *
 * @author Richard Lincoln
 */
public class Djp_util {

	private static final IntFunctions ifunc = IntFunctions.intFunctions;
	private static final DoubleFunctions dfunc = DoubleFunctions.functions;

	/** Singleton instance, for callers that want an object reference. */
	public static final Djp_util util = new Djp_util();

	/**
	 * Makes this class non instantiable, but still let's others inherit from
	 * it.
	 */
	protected Djp_util() {
	}

	/**
	 * Machine epsilon.
	 */
	public static final double EPS = 1e-15;

	/** Number of elements in the progression start, start+step, ... (exclusive of stop). */
	private static int rangeLength(double start, double stop, double step) {
		// ceil, not truncation: a fractional count still includes the final
		// element strictly before 'stop'. Clamped so an empty range yields
		// a zero-length array instead of a NegativeArraySizeException.
		return Math.max(0, (int) Math.ceil((stop - start) / step));
	}

	/**
	 * @param stop exclusive upper bound
	 * @return the integers 0, 1, ..., stop-1
	 */
	public static int[] irange(int stop) {
		return irange(0, stop);
	}

	/**
	 * @param start inclusive lower bound
	 * @param stop exclusive upper bound
	 * @return the integers start, start+1, ..., stop-1
	 */
	public static int[] irange(int start, int stop) {
		return irange(start, stop, 1);
	}

	/**
	 * @param start inclusive lower bound
	 * @param stop exclusive upper bound
	 * @param step increment
	 * @return the integers start, start+step, ... strictly before stop
	 */
	public static int[] irange(int start, int stop, int step) {
		// FIX: the length previously ignored 'step' (always stop-start), so
		// any step != 1 produced an oversized array overshooting 'stop'.
		int[] r = new int[rangeLength(start, stop, step)];
		int v = start;
		for (int i = 0; i < r.length; i++) {
			r[i] = v;
			v += step;
		}
		return r;
	}

	/**
	 * @param stop exclusive upper bound
	 * @return the values 0, 1, ..., stop-1 as doubles
	 */
	public static double[] drange(int stop) {
		return drange(0, stop);
	}

	/**
	 * @param start inclusive lower bound
	 * @param stop exclusive upper bound
	 * @return the values start, start+1, ..., stop-1 as doubles
	 */
	public static double[] drange(int start, int stop) {
		return drange(start, stop, 1);
	}

	/**
	 * @param start inclusive lower bound
	 * @param stop exclusive upper bound
	 * @param step increment
	 * @return the values start, start+step, ... strictly before stop
	 */
	public static double[] drange(int start, int stop, int step) {
		// FIX: length previously ignored 'step' (see irange above).
		double[] r = new double[rangeLength(start, stop, step)];
		int v = start;
		for (int i = 0; i < r.length; i++) {
			r[i] = v;
			v += step;
		}
		return r;
	}

	/**
	 * @param stop exclusive upper bound
	 * @return an arithmetic progression.
	 */
	public static double[] drange(double stop) {
		return drange(0, stop, 1);
	}

	/**
	 * @param start inclusive lower bound
	 * @param stop exclusive upper bound
	 * @return an arithmetic progression.
	 */
	public static double[] drange(double start, double stop) {
		return drange(start, stop, 1);
	}

	/**
	 * @param start inclusive lower bound
	 * @param stop exclusive upper bound
	 * @param step increment (or decrement)
	 * @return an arithmetic progression.
	 */
	public static double[] drange(double start, double stop, double step) {
		// FIX: truncation dropped the final element whenever (stop-start)/step
		// was non-integral; ceil keeps every element strictly before 'stop'.
		double[] r = new double[rangeLength(start, stop, step)];
		double v = start;
		for (int i = 0; i < r.length; i++) {
			r[i] = v;
			v += step;
		}
		return r;
	}

	/**
	 * @param size array length
	 * @return an integer array with all elements = 0.
	 */
	public static int[] zeros(int size) {
		// Java int arrays are zero-initialized by the JVM; the previous
		// (optionally multi-threaded) explicit fill was pure overhead.
		return new int[size];
	}

	/**
	 * @param size array length
	 * @return an integer array with all elements = 1.
	 */
	public static int[] ones(int size) {
		final int[] values = new int[size];
		java.util.Arrays.fill(values, 1);
		return values;
	}

	/**
	 * @param d vector of doubles
	 * @return the values of {@code d} rounded to the nearest int
	 */
	public static int[] inta(final DoubleMatrix1D d) {
		int size = (int) d.size();
		final int[] values = new int[size];
		int nthreads = ConcurrencyUtils.getNumberOfThreads();
		if ((nthreads > 1) && (size >= ConcurrencyUtils.getThreadsBeginN_1D())) {
			nthreads = Math.min(nthreads, size);
			Future<?>[] futures = new Future[nthreads];
			int k = size / nthreads;
			for (int j = 0; j < nthreads; j++) {
				final int firstIdx = j * k;
				final int lastIdx = (j == nthreads - 1) ? size : firstIdx + k;
				futures[j] = ConcurrencyUtils.submit(new Runnable() {
					public void run() {
						for (int i = firstIdx; i < lastIdx; i++) {
							values[i] = (int) Math.round(d.getQuick(i));
						}
					}
				});
			}
			ConcurrencyUtils.waitForCompletion(futures);
		} else {
			for (int i = 0; i < size; i++) {
				// FIX: the sequential path truncated while the parallel path
				// rounded, so results depended on the thread count.
				values[i] = (int) Math.round(d.getQuick(i));
			}
		}
		return values;
	}

	/**
	 * @param d vector of doubles
	 * @return the values of {@code d} rounded to the nearest int
	 */
	public static IntMatrix1D intm(final DoubleMatrix1D d) {
		int size = (int) d.size();
		final IntMatrix1D values = IntFactory1D.dense.make(size);
		int nthreads = ConcurrencyUtils.getNumberOfThreads();
		if ((nthreads > 1) && (size >= ConcurrencyUtils.getThreadsBeginN_1D())) {
			nthreads = Math.min(nthreads, size);
			Future<?>[] futures = new Future[nthreads];
			int k = size / nthreads;
			for (int j = 0; j < nthreads; j++) {
				final int firstIdx = j * k;
				final int lastIdx = (j == nthreads - 1) ? size : firstIdx + k;
				futures[j] = ConcurrencyUtils.submit(new Runnable() {
					public void run() {
						for (int i = firstIdx; i < lastIdx; i++) {
							values.setQuick(i, (int) Math.round(d.getQuick(i)));
						}
					}
				});
			}
			ConcurrencyUtils.waitForCompletion(futures);
		} else {
			for (int i = 0; i < size; i++) {
				// FIX: truncation here disagreed with the rounding parallel path.
				values.setQuick(i, (int) Math.round(d.getQuick(i)));
			}
		}
		return values;
	}

	/**
	 * @param ix array of ints
	 * @return a dense double vector with the same values
	 */
	public static DoubleMatrix1D dbla(final int[] ix) {
		int size = ix.length;
		final DoubleMatrix1D values = DoubleFactory1D.dense.make(size);
		int nthreads = ConcurrencyUtils.getNumberOfThreads();
		if ((nthreads > 1) && (size >= ConcurrencyUtils.getThreadsBeginN_1D())) {
			nthreads = Math.min(nthreads, size);
			Future<?>[] futures = new Future[nthreads];
			int k = size / nthreads;
			for (int j = 0; j < nthreads; j++) {
				final int firstIdx = j * k;
				final int lastIdx = (j == nthreads - 1) ? size : firstIdx + k;
				futures[j] = ConcurrencyUtils.submit(new Runnable() {
					public void run() {
						for (int i = firstIdx; i < lastIdx; i++) {
							values.setQuick(i, ix[i]);
						}
					}
				});
			}
			ConcurrencyUtils.waitForCompletion(futures);
		} else {
			for (int i = 0; i < size; i++) {
				values.setQuick(i, ix[i]);
			}
		}
		return values;
	}

	/**
	 * @param ix vector of ints
	 * @return a dense double vector with the same values
	 */
	public static DoubleMatrix1D dblm(final IntMatrix1D ix) {
		int size = (int) ix.size();
		final DoubleMatrix1D values = DoubleFactory1D.dense.make(size);
		int nthreads = ConcurrencyUtils.getNumberOfThreads();
		if ((nthreads > 1) && (size >= ConcurrencyUtils.getThreadsBeginN_1D())) {
			nthreads = Math.min(nthreads, size);
			Future<?>[] futures = new Future[nthreads];
			int k = size / nthreads;
			for (int j = 0; j < nthreads; j++) {
				final int firstIdx = j * k;
				final int lastIdx = (j == nthreads - 1) ? size : firstIdx + k;
				futures[j] = ConcurrencyUtils.submit(new Runnable() {
					public void run() {
						for (int i = firstIdx; i < lastIdx; i++) {
							values.setQuick(i, ix.getQuick(i));
						}
					}
				});
			}
			ConcurrencyUtils.waitForCompletion(futures);
		} else {
			for (int i = 0; i < size; i++) {
				values.setQuick(i, ix.getQuick(i));
			}
		}
		return values;
	}

	/**
	 * @param t non-empty array
	 * @return the largest element of {@code t}
	 */
	public static int max(int[] t) {
		// NOTE(review): throws ArrayIndexOutOfBoundsException on an empty
		// array, as before.
		int maximum = t[0];
		for (int i = 1; i < t.length; i++)
			if (t[i] > maximum)
				maximum = t[i];
		return maximum;
	}

	/**
	 * @param a first array
	 * @param b second array
	 * @return the concatenation [a, b]
	 */
	public static int[] icat(int[] a, int[] b) {
		int[] c = new int[a.length + b.length];
		System.arraycopy(a, 0, c, 0, a.length);
		System.arraycopy(b, 0, c, a.length, b.length);
		return c;
	}

	/**
	 * @param a first array
	 * @param b second array
	 * @return the concatenation [a, b]
	 */
	public static double[] dcat(double[] a, double[] b) {
		double[] c = new double[a.length + b.length];
		System.arraycopy(a, 0, c, 0, a.length);
		System.arraycopy(b, 0, c, a.length, b.length);
		return c;
	}

	/**
	 * @param a first array
	 * @param b second array
	 * @return the concatenation [a, b]
	 */
	public static String[] scat(String[] a, String[] b) {
		String[] c = new String[a.length + b.length];
		System.arraycopy(a, 0, c, 0, a.length);
		System.arraycopy(b, 0, c, a.length, b.length);
		return c;
	}

	/**
	 * @param a vector of ints
	 * @return the indices of the nonzero elements of {@code a}, in order
	 */
	public static int[] nonzero(IntMatrix1D a) {
		IntArrayList indexList = new IntArrayList();
		int size = (int) a.size();
		// Simple linear scan; replaces a manually unrolled loop with
		// identical visiting order and results.
		for (int i = 0; i < size; i++) {
			if (a.getQuick(i) != 0)
				indexList.add(i);
		}
		indexList.trimToSize();
		return indexList.elements();
	}

	/**
	 * @param a vector of doubles
	 * @return the indices of the nonzero elements of {@code a}, in order
	 */
	public static int[] nonzero(DoubleMatrix1D a) {
		IntArrayList indexList = new IntArrayList();
		int size = (int) a.size();
		for (int i = 0; i < size; i++) {
			if (a.getQuick(i) != 0)
				indexList.add(i);
		}
		indexList.trimToSize();
		return indexList.elements();
	}

	/**
	 * @param r polar radius.
	 * @param theta polar angle in radians.
	 * @return complex polar representation.
	 */
	public static DComplexMatrix1D polar(DoubleMatrix1D r, DoubleMatrix1D theta) {
		return polar(r, theta, true);
	}

	/**
	 * @param r polar radius.
	 * @param theta polar angle.
	 * @param radians is 'theta' expressed in radians.
	 * @return complex polar representation.
	 */
	@SuppressWarnings("static-access")
	public static DComplexMatrix1D polar(DoubleMatrix1D r, DoubleMatrix1D theta, boolean radians) {
		DoubleMatrix1D real = theta.copy();
		DoubleMatrix1D imag = theta.copy();
		if (!radians) {
			// degrees -> radians: x * PI / 180 (chain applies div first).
			real.assign(dfunc.chain(dfunc.mult(Math.PI), dfunc.div(180)));
			imag.assign(dfunc.chain(dfunc.mult(Math.PI), dfunc.div(180)));
		}
		real.assign(dfunc.cos);
		imag.assign(dfunc.sin);
		real.assign(r, dfunc.mult);
		imag.assign(r, dfunc.mult);
		DComplexMatrix1D cmplx = DComplexFactory1D.dense.make((int) r.size());
		cmplx.assignReal(real);
		cmplx.assignImaginary(imag);
		return cmplx;
	}

	/**
	 * @param r polar radius.
	 * @param theta polar angle in radians.
	 * @return complex polar representation.
	 */
	public static DComplexMatrix2D polar(DoubleMatrix2D r, DoubleMatrix2D theta) {
		return polar(r, theta, true);
	}

	/**
	 * @param r polar radius.
	 * @param theta polar angle.
	 * @param radians is 'theta' expressed in radians.
	 * @return complex polar representation.
	 */
	@SuppressWarnings("static-access")
	public static DComplexMatrix2D polar(DoubleMatrix2D r, DoubleMatrix2D theta, boolean radians) {
		DoubleMatrix2D real = theta.copy();
		DoubleMatrix2D imag = theta.copy();
		if (!radians) {
			real.assign(dfunc.chain(dfunc.mult(Math.PI), dfunc.div(180)));
			imag.assign(dfunc.chain(dfunc.mult(Math.PI), dfunc.div(180)));
		}
		real.assign(dfunc.cos);
		imag.assign(dfunc.sin);
		real.assign(r, dfunc.mult);
		imag.assign(r, dfunc.mult);
		DComplexMatrix2D cmplx = DComplexFactory2D.dense.make(r.rows(), r.columns());
		cmplx.assignReal(real);
		cmplx.assignImaginary(imag);
		return cmplx;
	}

	/**
	 * @param x vector of ints
	 * @return [x(1)-x(0)  x(2)-x(1) ... x(n)-x(n-1)]
	 */
	@SuppressWarnings("static-access")
	public static IntMatrix1D diff(IntMatrix1D x) {
		int size = (int) x.size() - 1;
		IntMatrix1D d = IntFactory1D.dense.make(size);
		for (int i = 0; i < size; i++)
			d.set(i, ifunc.minus.apply(x.get(i + 1), x.get(i)));
		return d;
	}

	/**
	 * @param x vector of doubles
	 * @return [x(1)-x(0)  x(2)-x(1) ... x(n)-x(n-1)]
	 */
	@SuppressWarnings("static-access")
	public static DoubleMatrix1D diff(DoubleMatrix1D x) {
		int size = (int) x.size() - 1;
		DoubleMatrix1D d = DoubleFactory1D.dense.make(size);
		for (int i = 0; i < size; i++)
			d.set(i, dfunc.minus.apply(x.get(i + 1), x.get(i)));
		return d;
	}

	/**
	 * @param x an array of integers.
	 * @return true if any element of vector x is a nonzero number.
	 */
	public static boolean any(int[] x) {
		for (int i : x)
			if (i != 0)
				return true;
		return false;
	}

	/**
	 * @param x a vector of integers.
	 * @return true if any element of vector x is a nonzero number.
	 */
	public static boolean any(IntMatrix1D x) {
		IntArrayList indexList = new IntArrayList();
		x.getNonZeros(indexList, new IntArrayList());
		return indexList.size() > 0;
	}

	/**
	 * @param x a vector of doubles.
	 * @return true if any element of vector x is a nonzero number.
	 */
	public static boolean any(DoubleMatrix1D x) {
		IntArrayList indexList = new IntArrayList();
		x.getNonZeros(indexList, new DoubleArrayList());
		return indexList.size() > 0;
	}

	/**
	 * @param x a matrix of doubles
	 * @return a vector with one entry per column: 1 if the column has any
	 *         nonzero element, else 0
	 */
	public static IntMatrix1D any(DoubleMatrix2D x) {
		int cols = x.columns();
		IntMatrix1D y = IntFactory1D.dense.make(cols);
		for (int i = 0; i < cols; i++) {
			int a = any(x.viewColumn(i)) ? 1 : 0;
			y.set(i, a);
		}
		return y;
	}

	/**
	 * @param x a vector of integers.
	 * @return true if all elements of 'x' are nonzero.
	 */
	public static boolean all(IntMatrix1D x) {
		IntArrayList indexList = new IntArrayList();
		x.getNonZeros(indexList, null);
		return x.size() == indexList.size();
	}

	/**
	 * @param x a vector of doubles.
	 * @return true if all elements of 'x' are nonzero.
	 */
	public static boolean all(DoubleMatrix1D x) {
		IntArrayList indexList = new IntArrayList();
		x.getNonZeros(indexList, null);
		return x.size() == indexList.size();
	}

	/**
	 * @param real real component, may be null
	 * @param imaginary imaginary component, may be null
	 * @return a complex vector
	 */
	public static DComplexMatrix1D complex(DoubleMatrix1D real, DoubleMatrix1D imaginary) {
		int size = 0;
		if (real != null)
			size = (int) real.size();
		if (imaginary != null)
			size = (int) imaginary.size();
		DComplexMatrix1D cmplx = DComplexFactory1D.dense.make(size);
		if (real != null)
			cmplx.assignReal(real);
		if (imaginary != null)
			cmplx.assignImaginary(imaginary);
		return cmplx;
	}

	/**
	 * @param real real component, may be null
	 * @param imaginary imaginary component, may be null
	 * @return a complex matrix
	 */
	public static DComplexMatrix2D complex(DoubleMatrix2D real, DoubleMatrix2D imaginary) {
		// FIX: the dimensions were read from 'real' before any null check,
		// contradicting the documented "may be null" contract (NPE). Size is
		// now taken from whichever component is present.
		int rows = 0, cols = 0;
		if (real != null) {
			rows = real.rows();
			cols = real.columns();
		} else if (imaginary != null) {
			rows = imaginary.rows();
			cols = imaginary.columns();
		}
		DComplexMatrix2D cmplx = DComplexFactory2D.dense.make(rows, cols);
		if (real != null)
			cmplx.assignReal(real);
		if (imaginary != null)
			cmplx.assignImaginary(imaginary);
		return cmplx;
	}

	/**
	 * @param rows number of matrix rows
	 * @param cols number of matrix columns
	 * @param I row subscripts
	 * @param J column subscripts
	 * @return row-major linear indices
	 */
	public static IntMatrix1D sub2ind(int rows, int cols, IntMatrix1D I, IntMatrix1D J) {
		return sub2ind(rows, cols, I, J, true);
	}

	/**
	 * @param rows number of matrix rows
	 * @param cols number of matrix columns
	 * @param I row subscripts
	 * @param J column subscripts
	 * @param row_major if true, ind = i*cols + j; otherwise ind = j*rows + i
	 * @return linear indices
	 */
	@SuppressWarnings("static-access")
	public static IntMatrix1D sub2ind(int rows, int cols, IntMatrix1D I, IntMatrix1D J, boolean row_major) {
		IntMatrix1D ind;
		if (row_major) {
			ind = I.copy().assign(ifunc.mod(rows)).assign(ifunc.mult(cols)).assign(J.copy().assign(ifunc.mod(cols)), ifunc.plus);
		} else {
			// FIX: column-major index is j*rows + i; the previous
			// ifunc.max(rows) computed max(j, rows) instead of j*rows.
			ind = J.copy().assign(ifunc.mod(cols)).assign(ifunc.mult(rows)).assign(I.copy().assign(ifunc.mod(rows)), ifunc.plus);
		}
		return ind;
	}

}
package org.apache.maven.plugins.enforcer; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.maven.enforcer.rule.api.EnforcerRuleException; import org.apache.maven.enforcer.rule.api.EnforcerRuleHelper; import org.apache.maven.model.Dependency; import org.apache.maven.model.Model; import org.apache.maven.model.Profile; import org.apache.maven.model.io.xpp3.MavenXpp3Reader; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.component.configurator.expression.ExpressionEvaluationException; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.xml.pull.XmlPullParserException; /** * Since Maven 3 'dependencies.dependency.(groupId:artifactId:type:classifier)' must be unique. * Early versions of Maven 3 already warn, this rule can force to break a build for this reason. 
* * @author Robert Scholte * @since 1.3 * */ public class BanDuplicatePomDependencyVersions extends AbstractNonCacheableEnforcerRule { public void execute( EnforcerRuleHelper helper ) throws EnforcerRuleException { // get the project MavenProject project; try { project = (MavenProject) helper.evaluate( "${project}" ); } catch ( ExpressionEvaluationException eee ) { throw new EnforcerRuleException( "Unable to retrieve the MavenProject: ", eee ); } // re-read model, because M3 uses optimized model MavenXpp3Reader modelReader = new MavenXpp3Reader(); FileReader pomReader = null; Model model; try { pomReader = new FileReader( project.getFile() ); model = modelReader.read( pomReader ); } catch ( FileNotFoundException e ) { throw new EnforcerRuleException( "Unable to retrieve the MavenProject: ", e ); } catch ( IOException e ) { throw new EnforcerRuleException( "Unable to retrieve the MavenProject: ", e ); } catch ( XmlPullParserException e ) { throw new EnforcerRuleException( "Unable to retrieve the MavenProject: ", e ); } finally { IOUtil.close( pomReader ); } // @todo reuse ModelValidator when possible // Object modelValidator = null; // try // { // modelValidator = helper.getComponent( "org.apache.maven.model.validation.ModelValidator" ); // } // catch ( ComponentLookupException e1 ) // { // // noop // } // if( modelValidator == null ) // { maven2Validation( helper, model ); // } // else // { // } } private void maven2Validation( EnforcerRuleHelper helper, Model model ) throws EnforcerRuleException { @SuppressWarnings( "unchecked" ) List<Dependency> dependencies = model.getDependencies(); Map<String, Integer> duplicateDependencies = validateDependencies( dependencies ); int duplicates = duplicateDependencies.size(); StringBuilder summary = new StringBuilder(); messageBuilder( duplicateDependencies, "dependencies.dependency", summary ); if ( model.getDependencyManagement() != null ) { @SuppressWarnings( "unchecked" ) List<Dependency> managementDependencies = 
model.getDependencies(); Map<String, Integer> duplicateManagementDependencies = validateDependencies( managementDependencies ); duplicates += duplicateManagementDependencies.size(); messageBuilder( duplicateManagementDependencies, "dependencyManagement.dependencies.dependency", summary ); } @SuppressWarnings( "unchecked" ) List<Profile> profiles = model.getProfiles(); for ( Profile profile : profiles ) { @SuppressWarnings( "unchecked" ) List<Dependency> profileDependencies = profile.getDependencies(); Map<String, Integer> duplicateProfileDependencies = validateDependencies( profileDependencies ); duplicates += duplicateProfileDependencies.size(); messageBuilder( duplicateProfileDependencies, "profiles.profile[" + profile.getId() + "].dependencies.dependency", summary ); if ( model.getDependencyManagement() != null ) { @SuppressWarnings( "unchecked" ) List<Dependency> profileManagementDependencies = profile.getDependencies(); Map<String, Integer> duplicateProfileManagementDependencies = validateDependencies( profileManagementDependencies ); duplicates += duplicateProfileManagementDependencies.size(); messageBuilder( duplicateProfileManagementDependencies, "profiles.profile[" + profile.getId() + "].dependencyManagement.dependencies.dependency", summary ); } } if ( summary.length() > 0 ) { StringBuilder message = new StringBuilder(); message.append( "Found " ).append( duplicates ).append( " duplicate dependency " ); message.append( duplicateDependencies.size() == 1 ? 
"declaration" : "declarations" ).append( " in this project:\n" ); message.append( summary ); throw new EnforcerRuleException( message.toString() ); } } private void messageBuilder( Map<String, Integer> duplicateDependencies, String prefix, StringBuilder message ) { if ( !duplicateDependencies.isEmpty() ) { for ( Map.Entry<String, Integer> entry : duplicateDependencies.entrySet() ) { message.append( " - " ).append( prefix ).append( '[' ).append( entry.getKey() ).append( "] ( " ).append( entry.getValue() ).append( " times )\n" ); } } } private Map<String, Integer> validateDependencies( List<Dependency> dependencies ) throws EnforcerRuleException { Map<String, Integer> duplicateDeps = new HashMap<String, Integer>(); Set<String> deps = new HashSet<String>(); for ( Dependency dependency : dependencies ) { String key = dependency.getManagementKey(); if ( deps.contains( key ) ) { int times = 1; if ( duplicateDeps.containsKey( key ) ) { times = duplicateDeps.get( key ); } duplicateDeps.put( key, times + 1 ); } else { deps.add( key ); } } return duplicateDeps; } }
/* * Copyright (C) 2011 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.util.concurrent; import static com.google.common.util.concurrent.MoreExecutors.directExecutor; import com.google.common.annotations.Beta; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import java.util.concurrent.Callable; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.locks.ReentrantLock; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.concurrent.GuardedBy; /** * Base class for services that can implement {@link #startUp} and {@link #shutDown} but while in * the "running" state need to perform a periodic task. Subclasses can implement {@link #startUp}, * {@link #shutDown} and also a {@link #runOneIteration} method that will be executed periodically. * * <p>This class uses the {@link ScheduledExecutorService} returned from {@link #executor} to run * the {@link #startUp} and {@link #shutDown} methods and also uses that service to schedule the * {@link #runOneIteration} that will be executed periodically as specified by its * {@link Scheduler}. 
When this service is asked to stop via {@link #stopAsync} it will cancel the * periodic task (but not interrupt it) and wait for it to stop before running the * {@link #shutDown} method. * * <p>Subclasses are guaranteed that the life cycle methods ({@link #runOneIteration}, {@link * #startUp} and {@link #shutDown}) will never run concurrently. Notably, if any execution of {@link * #runOneIteration} takes longer than its schedule defines, then subsequent executions may start * late. Also, all life cycle methods are executed with a lock held, so subclasses can safely * modify shared state without additional synchronization necessary for visibility to later * executions of the life cycle methods. * * <h3>Usage Example</h3> * * <p>Here is a sketch of a service which crawls a website and uses the scheduling capabilities to * rate limit itself. <pre> {@code * class CrawlingService extends AbstractScheduledService { * private Set<Uri> visited; * private Queue<Uri> toCrawl; * protected void startUp() throws Exception { * toCrawl = readStartingUris(); * } * * protected void runOneIteration() throws Exception { * Uri uri = toCrawl.remove(); * Collection<Uri> newUris = crawl(uri); * visited.add(uri); * for (Uri newUri : newUris) { * if (!visited.contains(newUri)) { toCrawl.add(newUri); } * } * } * * protected void shutDown() throws Exception { * saveUris(toCrawl); * } * * protected Scheduler scheduler() { * return Scheduler.newFixedRateSchedule(0, 1, TimeUnit.SECONDS); * } * }}</pre> * * <p>This class uses the life cycle methods to read in a list of starting URIs and save the set of * outstanding URIs when shutting down. Also, it takes advantage of the scheduling functionality to * rate limit the number of queries we perform. 
 *
 * @author Luke Sandberg
 * @since 11.0
 */
@Beta
public abstract class AbstractScheduledService implements Service {
  private static final Logger logger = Logger.getLogger(AbstractScheduledService.class.getName());

  /**
   * A scheduler defines the policy for how the {@link AbstractScheduledService} should run its
   * task.
   *
   * <p>Consider using the {@link #newFixedDelaySchedule} and {@link #newFixedRateSchedule} factory
   * methods, these provide {@link Scheduler} instances for the common use case of running the
   * service with a fixed schedule. If more flexibility is needed then consider subclassing
   * {@link CustomScheduler}.
   *
   * @author Luke Sandberg
   * @since 11.0
   */
  public abstract static class Scheduler {
    /**
     * Returns a {@link Scheduler} that schedules the task using the
     * {@link ScheduledExecutorService#scheduleWithFixedDelay} method.
     *
     * @param initialDelay the time to delay first execution
     * @param delay the delay between the termination of one execution and the commencement of the
     *        next
     * @param unit the time unit of the initialDelay and delay parameters
     */
    public static Scheduler newFixedDelaySchedule(final long initialDelay, final long delay,
        final TimeUnit unit) {
      return new Scheduler() {
        @Override
        public Future<?> schedule(AbstractService service, ScheduledExecutorService executor,
            Runnable task) {
          return executor.scheduleWithFixedDelay(task, initialDelay, delay, unit);
        }
      };
    }

    /**
     * Returns a {@link Scheduler} that schedules the task using the
     * {@link ScheduledExecutorService#scheduleAtFixedRate} method.
     *
     * @param initialDelay the time to delay first execution
     * @param period the period between successive executions of the task
     * @param unit the time unit of the initialDelay and period parameters
     */
    public static Scheduler newFixedRateSchedule(final long initialDelay, final long period,
        final TimeUnit unit) {
      return new Scheduler() {
        @Override
        public Future<?> schedule(AbstractService service, ScheduledExecutorService executor,
            Runnable task) {
          return executor.scheduleAtFixedRate(task, initialDelay, period, unit);
        }
      };
    }

    /** Schedules the task to run on the provided executor on behalf of the service. */
    abstract Future<?> schedule(AbstractService service, ScheduledExecutorService executor,
        Runnable runnable);

    // Private constructor: only the two factory methods and CustomScheduler may subclass.
    private Scheduler() {}
  }

  /* use AbstractService for state management */
  private final AbstractService delegate = new AbstractService() {

    // A handle to the running task so that we can stop it when a shutdown has been requested.
    // These two fields are volatile because their values will be accessed from multiple threads.
    private volatile Future<?> runningTask;
    private volatile ScheduledExecutorService executorService;

    // This lock protects the task so we can ensure that none of the template methods (startUp,
    // shutDown or runOneIteration) run concurrently with one another.
    // TODO(user): why don't we use ListenableFuture to sequence things? Then we could drop the
    // lock.
    private final ReentrantLock lock = new ReentrantLock();

    // The periodic work: runs one iteration under the lock; any throwable triggers
    // shutDown(), a FAILED transition, and cancellation of future invocations.
    private final Runnable task = new Runnable() {
      @Override public void run() {
        lock.lock();
        try {
          if (runningTask.isCancelled()) {
            // task may have been cancelled while blocked on the lock.
            return;
          }
          AbstractScheduledService.this.runOneIteration();
        } catch (Throwable t) {
          try {
            shutDown();
          } catch (Exception ignored) {
            logger.log(Level.WARNING,
                "Error while attempting to shut down the service after failure.", ignored);
          }
          notifyFailed(t);
          runningTask.cancel(false); // prevent future invocations.
        } finally {
          lock.unlock();
        }
      }
    };

    @Override protected final void doStart() {
      // Wrap the user-supplied executor so worker threads are named after the service
      // and its current state.
      executorService = MoreExecutors.renamingDecorator(executor(), new Supplier<String>() {
        @Override public String get() {
          return serviceName() + " " + state();
        }
      });
      executorService.execute(new Runnable() {
        @Override public void run() {
          lock.lock();
          try {
            startUp();
            runningTask = scheduler().schedule(delegate, executorService, task);
            notifyStarted();
          } catch (Throwable t) {
            notifyFailed(t);
            if (runningTask != null) {
              // prevent the task from running if possible
              runningTask.cancel(false);
            }
          } finally {
            lock.unlock();
          }
        }
      });
    }

    @Override protected final void doStop() {
      runningTask.cancel(false);
      executorService.execute(new Runnable() {
        @Override public void run() {
          try {
            lock.lock();
            try {
              if (state() != State.STOPPING) {
                // This means that the state has changed since we were scheduled. This implies that
                // an execution of runOneIteration has thrown an exception and we have transitioned
                // to a failed state, also this means that shutDown has already been called, so we
                // do not want to call it again.
                return;
              }
              shutDown();
            } finally {
              lock.unlock();
            }
            notifyStopped();
          } catch (Throwable t) {
            notifyFailed(t);
          }
        }
      });
    }
  };

  /** Constructor for use by subclasses. */
  protected AbstractScheduledService() {}

  /**
   * Run one iteration of the scheduled task. If any invocation of this method throws an exception,
   * the service will transition to the {@link Service.State#FAILED} state and this method will no
   * longer be called.
   */
  protected abstract void runOneIteration() throws Exception;

  /**
   * Start the service.
   *
   * <p>By default this method does nothing.
   */
  protected void startUp() throws Exception {}

  /**
   * Stop the service. This is guaranteed not to run concurrently with {@link #runOneIteration}.
   *
   * <p>By default this method does nothing.
   */
  protected void shutDown() throws Exception {}

  /**
   * Returns the {@link Scheduler} object used to configure this service. This method will only be
   * called once.
   */
  protected abstract Scheduler scheduler();

  /**
   * Returns the {@link ScheduledExecutorService} that will be used to execute the {@link #startUp},
   * {@link #runOneIteration} and {@link #shutDown} methods. If this method is overridden the
   * executor will not be {@linkplain ScheduledExecutorService#shutdown shutdown} when this
   * service {@linkplain Service.State#TERMINATED terminates} or
   * {@linkplain Service.State#TERMINATED fails}. Subclasses may override this method to supply a
   * custom {@link ScheduledExecutorService} instance. This method is guaranteed to only be called
   * once.
   *
   * <p>By default this returns a new {@link ScheduledExecutorService} with a single thread thread
   * pool that sets the name of the thread to the {@linkplain #serviceName() service name}.
   * Also, the pool will be {@linkplain ScheduledExecutorService#shutdown() shut down} when the
   * service {@linkplain Service.State#TERMINATED terminates} or
   * {@linkplain Service.State#TERMINATED fails}.
   */
  protected ScheduledExecutorService executor() {
    final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(
        new ThreadFactory() {
          @Override public Thread newThread(Runnable runnable) {
            return MoreExecutors.newThread(serviceName(), runnable);
          }
        });
    // Add a listener to shutdown the executor after the service is stopped. This ensures that the
    // JVM shutdown will not be prevented from exiting after this service has stopped or failed.
    // Technically this listener is added after start() was called so it is a little gross, but it
    // is called within doStart() so we know that the service cannot terminate or fail concurrently
    // with adding this listener so it is impossible to miss an event that we are interested in.
    addListener(new Listener() {
      @Override public void terminated(State from) {
        executor.shutdown();
      }
      @Override public void failed(State from, Throwable failure) {
        executor.shutdown();
      }
    }, directExecutor());
    return executor;
  }

  /**
   * Returns the name of this service. {@link AbstractScheduledService} may include the name in
   * debugging output.
   *
   * @since 14.0
   */
  protected String serviceName() {
    return getClass().getSimpleName();
  }

  @Override public String toString() {
    return serviceName() + " [" + state() + "]";
  }

  @Override public final boolean isRunning() {
    return delegate.isRunning();
  }

  @Override public final State state() {
    return delegate.state();
  }

  /**
   * @since 13.0
   */
  @Override public final void addListener(Listener listener, Executor executor) {
    delegate.addListener(listener, executor);
  }

  /**
   * @since 14.0
   */
  @Override public final Throwable failureCause() {
    return delegate.failureCause();
  }

  /**
   * @since 15.0
   */
  @Override public final Service startAsync() {
    delegate.startAsync();
    return this;
  }

  /**
   * @since 15.0
   */
  @Override public final Service stopAsync() {
    delegate.stopAsync();
    return this;
  }

  /**
   * @since 15.0
   */
  @Override public final void awaitRunning() {
    delegate.awaitRunning();
  }

  /**
   * @since 15.0
   */
  @Override public final void awaitRunning(long timeout, TimeUnit unit) throws TimeoutException {
    delegate.awaitRunning(timeout, unit);
  }

  /**
   * @since 15.0
   */
  @Override public final void awaitTerminated() {
    delegate.awaitTerminated();
  }

  /**
   * @since 15.0
   */
  @Override public final void awaitTerminated(long timeout, TimeUnit unit) throws TimeoutException {
    delegate.awaitTerminated(timeout, unit);
  }

  /**
   * A {@link Scheduler} that provides a convenient way for the {@link AbstractScheduledService} to
   * use a dynamically changing schedule. After every execution of the task, assuming it hasn't
   * been cancelled, the {@link #getNextSchedule} method will be called.
   *
   * @author Luke Sandberg
   * @since 11.0
   */
  @Beta
  public abstract static class CustomScheduler extends Scheduler {

    /**
     * A callable class that can reschedule itself using a {@link CustomScheduler}.
     */
    private class ReschedulableCallable extends ForwardingFuture<Void> implements Callable<Void> {

      /** The underlying task. */
      private final Runnable wrappedRunnable;

      /** The executor on which this Callable will be scheduled. */
      private final ScheduledExecutorService executor;

      /**
       * The service that is managing this callable. This is used so that failure can be
       * reported properly.
       */
      private final AbstractService service;

      /**
       * This lock is used to ensure safe and correct cancellation, it ensures that a new task is
       * not scheduled while a cancel is ongoing. Also it protects the currentFuture variable to
       * ensure that it is assigned atomically with being scheduled.
       */
      private final ReentrantLock lock = new ReentrantLock();

      /** The future that represents the next execution of this task. */
      @GuardedBy("lock")
      private Future<Void> currentFuture;

      ReschedulableCallable(AbstractService service, ScheduledExecutorService executor,
          Runnable runnable) {
        this.wrappedRunnable = runnable;
        this.executor = executor;
        this.service = service;
      }

      @Override public Void call() throws Exception {
        wrappedRunnable.run();
        reschedule();
        return null;
      }

      /**
       * Atomically reschedules this task and assigns the new future to {@link #currentFuture}.
       */
      public void reschedule() {
        // We reschedule ourselves with a lock held for two reasons. 1. we want to make sure that
        // cancel calls cancel on the correct future. 2. we want to make sure that the assignment
        // to currentFuture doesn't race with itself so that currentFuture is assigned in the
        // correct order.
        lock.lock();
        try {
          if (currentFuture == null || !currentFuture.isCancelled()) {
            final Schedule schedule = CustomScheduler.this.getNextSchedule();
            currentFuture = executor.schedule(this, schedule.delay, schedule.unit);
          }
        } catch (Throwable e) {
          // If an exception is thrown by the subclass then we need to make sure that the service
          // notices and transitions to the FAILED state. We do it by calling notifyFailed directly
          // because the service does not monitor the state of the future so if the exception is not
          // caught and forwarded to the service the task would stop executing but the service would
          // have no idea.
          service.notifyFailed(e);
        } finally {
          lock.unlock();
        }
      }

      // N.B. Only protect cancel and isCancelled because those are the only methods that are
      // invoked by the AbstractScheduledService.
      @Override public boolean cancel(boolean mayInterruptIfRunning) {
        // Ensure that a task cannot be rescheduled while a cancel is ongoing.
        lock.lock();
        try {
          return currentFuture.cancel(mayInterruptIfRunning);
        } finally {
          lock.unlock();
        }
      }

      @Override public boolean isCancelled() {
        lock.lock();
        try {
          return currentFuture.isCancelled();
        } finally {
          lock.unlock();
        }
      }

      @Override protected Future<Void> delegate() {
        throw new UnsupportedOperationException(
            "Only cancel and isCancelled is supported by this future");
      }
    }

    @Override
    final Future<?> schedule(AbstractService service, ScheduledExecutorService executor,
        Runnable runnable) {
      ReschedulableCallable task = new ReschedulableCallable(service, executor, runnable);
      task.reschedule();
      return task;
    }

    /**
     * A value object that represents an absolute delay until a task should be invoked.
     *
     * @author Luke Sandberg
     * @since 11.0
     */
    @Beta
    protected static final class Schedule {

      private final long delay;
      private final TimeUnit unit;

      /**
       * @param delay the time from now to delay execution
       * @param unit the time unit of the delay parameter
       */
      public Schedule(long delay, TimeUnit unit) {
        this.delay = delay;
        this.unit = Preconditions.checkNotNull(unit);
      }
    }

    /**
     * Calculates the time at which to next invoke the task.
     *
     * <p>This is guaranteed to be called immediately after the task has completed an iteration and
     * on the same thread as the previous execution of {@link
     * AbstractScheduledService#runOneIteration}.
     *
     * @return a schedule that defines the delay before the next execution.
     */
    protected abstract Schedule getNextSchedule() throws Exception;
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tajo.conf;

import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.tajo.TajoConstants;
import org.apache.tajo.util.NetUtils;
import org.apache.tajo.util.TUtil;

import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import java.util.Map;

/**
 * Tajo's central configuration, layered on Hadoop's {@link Configuration}.
 * All tunables are declared as {@link ConfVars} entries pairing a property name
 * with a typed default value; typed static/instance accessors below read and
 * write them.
 */
public class TajoConf extends Configuration {
  static {
    // Register Tajo's resource files so every Configuration instance picks up
    // the *-default.xml defaults and the *-site.xml overrides.
    Configuration.addDefaultResource("catalog-default.xml");
    Configuration.addDefaultResource("catalog-site.xml");
    Configuration.addDefaultResource("storage-default.xml");
    Configuration.addDefaultResource("storage-site.xml");
    Configuration.addDefaultResource("tajo-default.xml");
    Configuration.addDefaultResource("tajo-site.xml");
  }

  private static final String EMPTY_VALUE = "";

  // Lookup table of known variables; presumably populated elsewhere in this
  // class (beyond the visible portion) — TODO confirm.
  private static final Map<String, ConfVars> vars = TUtil.newHashMap();

  public TajoConf() {
    super();
  }

  public TajoConf(Configuration conf) {
    super(conf);
  }

  public TajoConf(Path path) {
    super();
    addResource(path);
  }

  public static enum ConfVars {
    //////////////////////////////////
    // Tajo System Configuration
    //////////////////////////////////

    // Root directory and user name of a running Tajo cluster
    ROOT_DIR("tajo.rootdir", "file:///tmp/tajo-${user.name}/"),
    USERNAME("tajo.username", "${user.name}"),

    // Configurable System Directories
    WAREHOUSE_DIR("tajo.warehouse.directory", EMPTY_VALUE),
    STAGING_ROOT_DIR("tajo.staging.directory", "/tmp/tajo-${user.name}/staging"),

    SYSTEM_CONF_PATH("tajo.system-conf.path", EMPTY_VALUE),
    SYSTEM_CONF_REPLICA_COUNT("tajo.system-conf.replica-count", 20),

    // Tajo Master Service Addresses
    TAJO_MASTER_UMBILICAL_RPC_ADDRESS("tajo.master.umbilical-rpc.address", "localhost:26001"),
    TAJO_MASTER_CLIENT_RPC_ADDRESS("tajo.master.client-rpc.address", "localhost:26002"),
    TAJO_MASTER_INFO_ADDRESS("tajo.master.info-http.address", "0.0.0.0:26080"),

    // QueryMaster resource
    TAJO_QUERYMASTER_DISK_SLOT("tajo.qm.resource.disk.slots", 0.0f),
    TAJO_QUERYMASTER_MEMORY_MB("tajo.qm.resource.memory-mb", 512),

    // Tajo Worker Service Addresses
    WORKER_INFO_ADDRESS("tajo.worker.info-http.address", "0.0.0.0:28080"),
    WORKER_QM_INFO_ADDRESS("tajo.worker.qm-info-http.address", "0.0.0.0:28081"),
    WORKER_PEER_RPC_ADDRESS("tajo.worker.peer-rpc.address", "0.0.0.0:28091"),
    WORKER_CLIENT_RPC_ADDRESS("tajo.worker.client-rpc.address", "0.0.0.0:28092"),
    WORKER_QM_RPC_ADDRESS("tajo.worker.qm-rpc.address", "0.0.0.0:28093"),

    // Tajo Worker Temporal Directories
    WORKER_TEMPORAL_DIR("tajo.worker.tmpdir.locations", "/tmp/tajo-${user.name}/tmpdir"),
    WORKER_TEMPORAL_DIR_CLEANUP("tajo.worker.tmpdir.cleanup-at-startup", false),

    // Tajo Worker Resources
    WORKER_RESOURCE_AVAILABLE_CPU_CORES("tajo.worker.resource.cpu-cores", 1),
    WORKER_RESOURCE_AVAILABLE_MEMORY_MB("tajo.worker.resource.memory-mb", 1024),
    WORKER_RESOURCE_AVAILABLE_DISKS("tajo.worker.resource.disks", 1.0f),
    WORKER_EXECUTION_MAX_SLOTS("tajo.worker.parallel-execution.max-num", 2),

    // Tajo Worker Dedicated Resources
    WORKER_RESOURCE_DEDICATED("tajo.worker.resource.dedicated", false),
    WORKER_RESOURCE_DEDICATED_MEMORY_RATIO("tajo.worker.resource.dedicated-memory-ratio", 0.8f),

    // Tajo Worker History
    WORKER_HISTORY_EXPIRE_PERIOD("tajo.worker.history.expire-interval-minutes", 12 * 60), // 12 hours

    WORKER_HEARTBEAT_TIMEOUT("tajo.worker.heartbeat.timeout", 120 * 1000),  //120 sec

    // Resource Manager
    RESOURCE_MANAGER_CLASS("tajo.resource.manager", "org.apache.tajo.master.rm.TajoWorkerResourceManager"),

    // Catalog
    CATALOG_ADDRESS("tajo.catalog.client-rpc.address", "localhost:26005"),

    //////////////////////////////////
    // for Yarn Resource Manager
    //////////////////////////////////
    YARN_RM_QUERY_MASTER_MEMORY_MB("tajo.querymaster.memory-mb", 512),
    YARN_RM_QUERY_MASTER_DISKS("tajo.yarn-rm.querymaster.disks", 1),
    /** how many launching TaskRunners in parallel */
    YARN_RM_TASKRUNNER_LAUNCH_PARALLEL_NUM("tajo.yarn-rm.parallel-task-runner-launcher-num", 16),
    YARN_RM_WORKER_NUMBER_PER_NODE("tajo.yarn-rm.max-worker-num-per-node", 8),

    //////////////////////////////////
    // Query Configuration
    //////////////////////////////////
    QUERY_SESSION_TIMEOUT("tajo.query.session.timeout-sec", 60),

    //////////////////////////////////
    // Shuffle Configuration
    //////////////////////////////////
    PULLSERVER_PORT("tajo.pullserver.port", 0),
    SHUFFLE_SSL_ENABLED_KEY("tajo.pullserver.ssl.enabled", false),
    SHUFFLE_FILE_FORMAT("tajo.shuffle.file-format", "RAW"),
    SHUFFLE_FETCHER_PARALLEL_EXECUTION_MAX_NUM("tajo.shuffle.fetcher.parallel-execution.max-num", 2),

    //////////////////////////////////
    // Storage Configuration
    //////////////////////////////////
    RAWFILE_SYNC_INTERVAL("rawfile.sync.interval", null),
    MINIMUM_SPLIT_SIZE("tajo.min.split.size", (long) 1),
    // for RCFile
    HIVEUSEEXPLICITRCFILEHEADER("tajo.exec.rcfile.use.explicit.header", true),

    // for Storage Manager v2
    STORAGE_MANAGER_VERSION_2("tajo.storage-manager.v2", false),
    STORAGE_MANAGER_DISK_SCHEDULER_MAX_READ_BYTES_PER_SLOT("tajo.storage-manager.max-read-bytes",
        8 * 1024 * 1024),
    STORAGE_MANAGER_DISK_SCHEDULER_REPORT_INTERVAL("tajo.storage-manager.disk-scheduler.report-interval",
        60 * 1000),
    STORAGE_MANAGER_CONCURRENCY_PER_DISK("tajo.storage-manager.disk-scheduler.per-disk-concurrency", 2),

    //////////////////////////////////////////
    // Distributed Query Execution Parameters
    //////////////////////////////////////////
    DIST_QUERY_BROADCAST_JOIN_THRESHOLD("tajo.dist-query.join.broadcast.threshold-bytes", (long)5 * 1048576),

    DIST_QUERY_JOIN_TASK_VOLUME("tajo.dist-query.join.task-volume-mb", 128),
    DIST_QUERY_SORT_TASK_VOLUME("tajo.dist-query.sort.task-volume-mb", 128),
    DIST_QUERY_GROUPBY_TASK_VOLUME("tajo.dist-query.groupby.task-volume-mb", 128),

    DIST_QUERY_JOIN_PARTITION_VOLUME("tajo.dist-query.join.partition-volume-mb", 128),
    DIST_QUERY_SORT_PARTITION_VOLUME("tajo.dist-query.sort.partition-volume-mb", 256),
    DIST_QUERY_GROUPBY_PARTITION_VOLUME("tajo.dist-query.groupby.partition-volume-mb", 256),

    //////////////////////////////////
    // Physical Executors
    //////////////////////////////////
    EXECUTOR_EXTERNAL_SORT_THREAD_NUM("tajo.executor.external-sort.thread-num", 1),
    EXECUTOR_EXTERNAL_SORT_BUFFER_SIZE("tajo.executor.external-sort.buffer-mb", 200),
    EXECUTOR_EXTERNAL_SORT_FANOUT("tajo.executor.external-sort.fanout-num", 8),

    EXECUTOR_INNER_JOIN_INMEMORY_HASH_TABLE_SIZE("tajo.executor.join.inner.in-memory-table-num", (long)1000000),
    EXECUTOR_INNER_JOIN_INMEMORY_HASH_THRESHOLD("tajo.executor.join.inner.in-memory-hash-threshold-bytes",
        (long)256 * 1048576),
    EXECUTOR_OUTER_JOIN_INMEMORY_HASH_THRESHOLD("tajo.executor.join.outer.in-memory-hash-threshold-bytes",
        (long)256 * 1048576),
    EXECUTOR_GROUPBY_INMEMORY_HASH_THRESHOLD("tajo.executor.groupby.in-memory-hash-threshold-bytes",
        (long)256 * 1048576),

    //////////////////////////////////
    // RPC
    //////////////////////////////////
    RPC_POOL_MAX_IDLE("tajo.rpc.pool.idle.max", 10),

    //Internal RPC Client
    INTERNAL_RPC_CLIENT_WORKER_THREAD_NUM("tajo.internal.rpc.client.worker-thread-num",
        Runtime.getRuntime().availableProcessors() * 2),

    //Internal RPC Server
    MASTER_RPC_SERVER_WORKER_THREAD_NUM("tajo.master.rpc.server.worker-thread-num",
        Runtime.getRuntime().availableProcessors() * 2),
    QUERY_MASTER_RPC_SERVER_WORKER_THREAD_NUM("tajo.querymaster.rpc.server.worker-thread-num",
        Runtime.getRuntime().availableProcessors() * 2),
    WORKER_RPC_SERVER_WORKER_THREAD_NUM("tajo.worker.rpc.server.worker-thread-num",
        Runtime.getRuntime().availableProcessors() * 2),
    CATALOG_RPC_SERVER_WORKER_THREAD_NUM("tajo.catalog.rpc.server.worker-thread-num",
        Runtime.getRuntime().availableProcessors() * 2),
    SHUFFLE_RPC_SERVER_WORKER_THREAD_NUM("tajo.shuffle.rpc.server.worker-thread-num",
        Runtime.getRuntime().availableProcessors() * 2),

    // Client RPC
    RPC_CLIENT_WORKER_THREAD_NUM("tajo.rpc.client.worker-thread-num", 4),

    //Client service RPC Server
    MASTER_SERVICE_RPC_SERVER_WORKER_THREAD_NUM("tajo.master.service.rpc.server.worker-thread-num",
        Runtime.getRuntime().availableProcessors() * 1),
    WORKER_SERVICE_RPC_SERVER_WORKER_THREAD_NUM("tajo.worker.service.rpc.server.worker-thread-num",
        Runtime.getRuntime().availableProcessors() * 1),

    //////////////////////////////////
    // The Below is reserved
    //////////////////////////////////

    // GeoIP
    GEOIP_DATA("tajo.geoip.data", ""),

    //////////////////////////////////
    // Hive Configuration
    //////////////////////////////////
    HIVE_QUERY_MODE("tajo.hive.query.mode", false),

    //////////////////////////////////
    // Task Configuration
    TASK_DEFAULT_MEMORY("tajo.task.memory-slot-mb.default", 512),
    TASK_DEFAULT_DISK("tajo.task.disk-slot.default", 1.0f),
    TASK_DEFAULT_SIZE("tajo.task.size-mb", 128),

    //////////////////////////////////
    // Metrics
    METRICS_PROPERTY_FILENAME("tajo.metrics.property.file", "tajo-metrics.properties"),

    //CLI
    CLI_MAX_COLUMN("tajo.cli.max_columns", 120)
    ;

    // Property name, the default in every supported representation, and the
    // declared value type. Exactly one typed default is meaningful per entry;
    // the others carry sentinel values (-1 / false / null).
    public final String varname;
    public final String defaultVal;
    public final int defaultIntVal;
    public final long defaultLongVal;
    public final float defaultFloatVal;
    public final Class<?> valClass;
    public final boolean defaultBoolVal;

    private final VarType type;

    // One constructor per supported default type; each records the typed
    // default, mirrors it into defaultVal as a String, and tags the VarType.
    ConfVars(String varname, String defaultVal) {
      this.varname = varname;
      this.valClass = String.class;
      this.defaultVal = defaultVal;
      this.defaultIntVal = -1;
      this.defaultLongVal = -1;
      this.defaultFloatVal = -1;
      this.defaultBoolVal = false;
      this.type = VarType.STRING;
    }

    ConfVars(String varname, int defaultIntVal) {
      this.varname = varname;
      this.valClass = Integer.class;
      this.defaultVal = Integer.toString(defaultIntVal);
      this.defaultIntVal = defaultIntVal;
      this.defaultLongVal = -1;
      this.defaultFloatVal = -1;
      this.defaultBoolVal = false;
      this.type = VarType.INT;
    }

    ConfVars(String varname, long defaultLongVal) {
      this.varname = varname;
      this.valClass = Long.class;
      this.defaultVal = Long.toString(defaultLongVal);
      this.defaultIntVal = -1;
      this.defaultLongVal = defaultLongVal;
      this.defaultFloatVal = -1;
      this.defaultBoolVal = false;
      this.type = VarType.LONG;
    }

    ConfVars(String varname, float defaultFloatVal) {
      this.varname = varname;
      this.valClass = Float.class;
      this.defaultVal = Float.toString(defaultFloatVal);
      this.defaultIntVal = -1;
      this.defaultLongVal = -1;
      this.defaultFloatVal = defaultFloatVal;
      this.defaultBoolVal = false;
      this.type = VarType.FLOAT;
    }

    ConfVars(String varname, boolean defaultBoolVal) {
      this.varname = varname;
      this.valClass = Boolean.class;
      this.defaultVal = Boolean.toString(defaultBoolVal);
      this.defaultIntVal = -1;
      this.defaultLongVal = -1;
      this.defaultFloatVal = -1;
      this.defaultBoolVal = defaultBoolVal;
      this.type = VarType.BOOLEAN;
    }

    // Per-type parse check: checkType throws when the string does not parse as
    // the declared type, and isType converts that into a boolean.
    enum VarType {
      STRING {
        void checkType(String value) throws Exception {
        }
      },
      INT {
        void checkType(String value) throws Exception {
          Integer.valueOf(value);
        }
      },
      LONG {
        void checkType(String value) throws Exception {
          Long.valueOf(value);
        }
      },
      FLOAT {
        void checkType(String value) throws Exception {
          Float.valueOf(value);
        }
      },
      BOOLEAN {
        void checkType(String value) throws Exception {
          Boolean.valueOf(value);
        }
      };

      boolean isType(String value) {
        try {
          checkType(value);
        } catch (Exception e) {
          return false;
        }
        return true;
      }

      String typeString() { return name().toUpperCase();}

      abstract void checkType(String value) throws Exception;
    }
  }

  // Typed accessors. The asserts document (but, being asserts, do not enforce
  // in production) that the variable was declared with the matching type.
  public static int getIntVar(Configuration conf, ConfVars var) {
    assert (var.valClass == Integer.class);
    return conf.getInt(var.varname, var.defaultIntVal);
  }

  public static void setIntVar(Configuration conf, ConfVars var, int val) {
    assert (var.valClass == Integer.class);
    conf.setInt(var.varname, val);
  }

  public int getIntVar(ConfVars var) {
    return getIntVar(this, var);
  }

  public void setIntVar(ConfVars var, int val) {
    setIntVar(this, var, val);
  }

  public static long getLongVar(Configuration conf, ConfVars var) {
    assert (var.valClass == Long.class);
    return conf.getLong(var.varname, var.defaultLongVal);
  }

  public static long getLongVar(Configuration conf, ConfVars var, long defaultVal) {
    return conf.getLong(var.varname, defaultVal);
  }

  public static void setLongVar(Configuration conf, ConfVars var, long val) {
    assert (var.valClass == Long.class);
    conf.setLong(var.varname, val);
  }

  public long getLongVar(ConfVars var) {
    return getLongVar(this, var);
  }

  public void setLongVar(ConfVars var, long val) {
    setLongVar(this, var, val);
  }

  public static float getFloatVar(Configuration conf, ConfVars var) {
    assert (var.valClass == Float.class);
    return conf.getFloat(var.varname, var.defaultFloatVal);
  }

  public static float getFloatVar(Configuration conf, ConfVars var, float defaultVal) {
    return conf.getFloat(var.varname, defaultVal);
  }

  public static void setFloatVar(Configuration conf, ConfVars var, float val) {
    assert (var.valClass == Float.class);
    conf.setFloat(var.varname, val);
  }

  public float getFloatVar(ConfVars var) {
    return getFloatVar(this, var);
  }

  public void setFloatVar(ConfVars var, float val) {
    setFloatVar(this, var, val);
  }

  public static boolean getBoolVar(Configuration conf, ConfVars var) {
    assert (var.valClass == Boolean.class);
    return conf.getBoolean(var.varname, var.defaultBoolVal);
  }

  public static boolean getBoolVar(Configuration conf, ConfVars var, boolean
defaultVal) { return conf.getBoolean(var.varname, defaultVal); } public static void setBoolVar(Configuration conf, ConfVars var, boolean val) { assert (var.valClass == Boolean.class); conf.setBoolean(var.varname, val); } public boolean getBoolVar(ConfVars var) { return getBoolVar(this, var); } public void setBoolVar(ConfVars var, boolean val) { setBoolVar(this, var, val); } public static String getVar(Configuration conf, ConfVars var) { assert (var.valClass == String.class); return conf.get(var.varname, var.defaultVal); } public static String getVar(Configuration conf, ConfVars var, String defaultVal) { return conf.get(var.varname, defaultVal); } public static void setVar(Configuration conf, ConfVars var, String val) { assert (var.valClass == String.class); conf.set(var.varname, val); } public static ConfVars getConfVars(String name) { return vars.get(name); } public String getVar(ConfVars var) { return getVar(this, var); } public void setVar(ConfVars var, String val) { setVar(this, var, val); } public void logVars(PrintStream ps) { for (ConfVars one : ConfVars.values()) { ps.println(one.varname + "=" + ((get(one.varname) != null) ? 
get(one.varname) : "")); } } public InetSocketAddress getSocketAddrVar(ConfVars var) { final String address = getVar(var); return NetUtils.createSocketAddr(address); } ///////////////////////////////////////////////////////////////////////////// // Tajo System Specific Methods ///////////////////////////////////////////////////////////////////////////// public static Path getTajoRootDir(TajoConf conf) { String rootPath = conf.getVar(ConfVars.ROOT_DIR); Preconditions.checkNotNull(rootPath, ConfVars.ROOT_DIR.varname + " must be set before a Tajo Cluster starts up"); return new Path(rootPath); } public static Path getWarehouseDir(TajoConf conf) { String warehousePath = conf.getVar(ConfVars.WAREHOUSE_DIR); if (warehousePath == null || warehousePath.equals("")) { Path rootDir = getTajoRootDir(conf); warehousePath = new Path(rootDir, TajoConstants.WAREHOUSE_DIR_NAME).toUri().toString(); conf.setVar(ConfVars.WAREHOUSE_DIR, warehousePath); return new Path(warehousePath); } else { return new Path(warehousePath); } } public static Path getSystemDir(TajoConf conf) { Path rootPath = getTajoRootDir(conf); return new Path(rootPath, TajoConstants.SYSTEM_DIR_NAME); } public static Path getSystemResourceDir(TajoConf conf) { return new Path(getSystemDir(conf), TajoConstants.SYSTEM_RESOURCE_DIR_NAME); } private static boolean hasScheme(String path) { return path.indexOf("file:/") == 0 || path.indexOf("hdfs:/") == 0; } public static Path getStagingDir(TajoConf conf) throws IOException { String stagingDirString = conf.getVar(ConfVars.STAGING_ROOT_DIR); if (!hasScheme(stagingDirString)) { Path warehousePath = getWarehouseDir(conf); FileSystem fs = warehousePath.getFileSystem(conf); Path path = new Path(fs.getUri().toString(), stagingDirString); conf.setVar(ConfVars.STAGING_ROOT_DIR, path.toString()); return path; } return new Path(stagingDirString); } public static Path getSystemConfPath(TajoConf conf) { String systemConfPathStr = conf.getVar(ConfVars.SYSTEM_CONF_PATH); if 
(systemConfPathStr == null || systemConfPathStr.equals("")) { Path systemResourcePath = getSystemResourceDir(conf); Path systemConfPath = new Path(systemResourcePath, TajoConstants.SYSTEM_CONF_FILENAME); conf.setVar(ConfVars.SYSTEM_CONF_PATH, systemConfPath.toString()); return systemConfPath; } else { return new Path(systemConfPathStr); } } }
package ca.uhn.fhir.jpa.dao.dstu2;

import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.Arrays;

import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.*;
import org.junit.Test;

import ca.uhn.fhir.jpa.dao.DaoConfig;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.dstu2.resource.*;
import ca.uhn.fhir.model.dstu2.resource.Bundle.Entry;
import ca.uhn.fhir.model.dstu2.valueset.ObservationStatusEnum;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.server.exceptions.*;
import ca.uhn.fhir.util.TestUtil;

/**
 * Integration tests for the DSTU2 JPA DAO $validate operation: profile-declaration
 * validation (JSON and XML), and the CREATE/UPDATE/DELETE validation modes.
 */
public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2ValidateTest.class);

	@AfterClass
	public static void afterClassClearContext() {
		TestUtil.clearAllStaticFieldsForUnitTest();
	}

	// Tests reference absolute external URLs (e.g. http://foo.com/Encounter/9),
	// so external references must be allowed for the duration of each test.
	@Before
	public void before() {
		myDaoConfig.setAllowExternalReferences(true);
	}

	// Restore the DaoConfig default so other test classes see pristine settings.
	@After
	public void after() {
		myDaoConfig.setAllowExternalReferences(new DaoConfig().isAllowExternalReferences());
	}

	/**
	 * Validates an Observation claiming the device-metric profile, encoded as JSON,
	 * and checks the three expected structural violations appear in the outcome.
	 */
	@Test
	public void testValidateResourceContainingProfileDeclarationJson() throws Exception {
		String methodName = "testValidateResourceContainingProfileDeclarationJson";
		OperationOutcome outcome = doTestValidateResourceContainingProfileDeclaration(methodName, EncodingEnum.JSON);

		String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
		ourLog.info(ooString);
		// JSON validator locations use '.'-relative paths, unlike the XML variant below.
		assertThat(ooString, containsString("Element '.subject': minimum required = 1, but only found 0"));
		assertThat(ooString, containsString("Element encounter @ : max allowed = 0, but found 1"));
		assertThat(ooString, containsString("Element '.device': minimum required = 1, but only found 0"));
	}

	/**
	 * Same scenario as the JSON test, but encoded as XML; the validator reports
	 * XPath-style locations (/f:Observation...).
	 */
	@Test
	public void testValidateResourceContainingProfileDeclarationXml() throws Exception {
		String methodName = "testValidateResourceContainingProfileDeclarationXml";
		OperationOutcome outcome = doTestValidateResourceContainingProfileDeclaration(methodName, EncodingEnum.XML);

		String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
		ourLog.info(ooString);
		assertThat(ooString, containsString("Element '/f:Observation.subject': minimum required = 1, but only found 0"));
		assertThat(ooString, containsString("Element encounter @ /f:Observation: max allowed = 0, but found 1"));
		assertThat(ooString, containsString("Element '/f:Observation.device': minimum required = 1, but only found 0"));
	}

	/**
	 * Shared fixture: loads the base ValueSets and the device-metric StructureDefinition,
	 * builds an Observation that declares that profile but violates it, then validates it
	 * in the requested encoding. Validation is expected to fail, and the failure's
	 * OperationOutcome is returned.
	 */
	private OperationOutcome doTestValidateResourceContainingProfileDeclaration(String methodName, EncodingEnum enc) throws IOException {
		// Register the standard ValueSets the device-metric profile binds to.
		Bundle vss = loadResourceFromClasspath(Bundle.class, "/org/hl7/fhir/instance/model/valueset/valuesets.xml");
		myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-status"), mySrd);
		myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-category"), mySrd);
		myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-codes"), mySrd);
		myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-methods"), mySrd);
		myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-valueabsentreason"), mySrd);
		myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-interpretation"), mySrd);
		myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "body-site"), mySrd);
		myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "referencerange-meaning"), mySrd);
		myValueSetDao.update((ValueSet) findResourceByIdInBundle(vss, "observation-relationshiptypes"), mySrd);

		// Store the profile under a per-test URL so test runs don't collide.
		StructureDefinition sd = loadResourceFromClasspath(StructureDefinition.class, "/org/hl7/fhir/instance/model/profile/devicemetricobservation.profile.xml");
		sd.setId(new IdDt());
		sd.setUrl("http://example.com/foo/bar/" + methodName);
		myStructureDefinitionDao.create(sd, mySrd);

		// Deliberately violates the profile: has an encounter (max 0) and lacks subject/device (min 1).
		Observation input = new Observation();
		ResourceMetadataKeyEnum.PROFILES.put(input, Arrays.asList(new IdDt(sd.getUrl())));
		input.addIdentifier().setSystem("http://acme").setValue("12345");
		input.getEncounter().setReference("http://foo.com/Encounter/9");
		input.setStatus(ObservationStatusEnum.FINAL);
		input.getCode().addCoding().setSystem("http://loinc.org").setCode("12345");

		String encoded = null;
		// NOTE(review): this local is never used; left in place to keep behavior byte-identical.
		MethodOutcome outcome = null;
		ValidationModeEnum mode = ValidationModeEnum.CREATE;
		// No break statements: each case either returns from its catch block or
		// fail() throws AssertionError, so fall-through is unreachable in practice.
		switch (enc) {
		case JSON:
			encoded = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(input);
			ourLog.info(encoded);
			try {
				myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);
				fail();
			} catch (PreconditionFailedException e) {
				return (OperationOutcome) e.getOperationOutcome();
			}
		case XML:
			encoded = myFhirCtx.newXmlParser().encodeResourceToString(input);
			try {
				myObservationDao.validate(input, null, encoded, EncodingEnum.XML, mode, null, mySrd);
				fail();
			} catch (PreconditionFailedException e) {
				return (OperationOutcome) e.getOperationOutcome();
			}
		}

		throw new IllegalStateException(); // shouldn't get here
	}

	/**
	 * Validating a resource that declares an unknown profile URL should still succeed,
	 * but the outcome must report that the StructureDefinition could not be resolved.
	 */
	@Test
	public void testValidateResourceContainingProfileDeclarationInvalid() throws Exception {
		String methodName = "testValidateResourceContainingProfileDeclarationInvalid";

		Observation input = new Observation();
		String profileUri = "http://example.com/" + methodName;
		ResourceMetadataKeyEnum.PROFILES.put(input, Arrays.asList(new IdDt(profileUri)));
		input.addIdentifier().setSystem("http://acme").setValue("12345");
		input.getEncounter().setReference("http://foo.com/Encounter/9");
		input.setStatus(ObservationStatusEnum.FINAL);
		input.getCode().addCoding().setSystem("http://loinc.org").setCode("12345");

		ValidationModeEnum mode = ValidationModeEnum.CREATE;
		String encoded = myFhirCtx.newJsonParser().encodeResourceToString(input);
		MethodOutcome outcome = myObservationDao.validate(input, null, encoded, EncodingEnum.JSON, mode, null, mySrd);

		String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome.getOperationOutcome());
		ourLog.info(ooString);
		assertThat(ooString, containsString("StructureDefinition reference \\\"" + profileUri + "\\\" could not be resolved"));
	}

	/** CREATE-mode validation rejects a populated ID and accepts an empty one. */
	@Test
	public void testValidateForCreate() {
		String methodName = "testValidateForCreate";

		Patient pat = new Patient();
		pat.setId("Patient/123");
		pat.addName().addFamily(methodName);

		try {
			myPatientDao.validate(pat, null, null, null, ValidationModeEnum.CREATE, null, mySrd);
			fail();
		} catch (UnprocessableEntityException e) {
			assertThat(e.getMessage(), containsString("ID must not be populated"));
		}

		pat.setId("");
		myPatientDao.validate(pat, null, null, null, ValidationModeEnum.CREATE, null, mySrd);

	}

	/** UPDATE-mode validation requires a populated ID and rejects an empty one. */
	@Test
	public void testValidateForUpdate() {
		String methodName = "testValidateForUpdate";

		Patient pat = new Patient();
		pat.setId("Patient/123");
		pat.addName().addFamily(methodName);
		myPatientDao.validate(pat, null, null, null, ValidationModeEnum.UPDATE, null, mySrd);

		pat.setId("");

		try {
			myPatientDao.validate(pat, null, null, null, ValidationModeEnum.UPDATE, null, mySrd);
			fail();
		} catch (UnprocessableEntityException e) {
			assertThat(e.getMessage(), containsString("ID must be populated"));
		}

	}

	/**
	 * NOTE(review): despite the name, the contained Organization is created but never
	 * attached to the Patient (no getContained()/setManagingOrganization call), so this
	 * currently duplicates testValidateForUpdate. Verify against the original intent.
	 */
	@Test
	public void testValidateForUpdateWithContained() {
		String methodName = "testValidateForUpdate";

		Organization org = new Organization();
		org.setId("#123");

		Patient pat = new Patient();
		pat.setId("Patient/123");
		pat.addName().addFamily(methodName);
		myPatientDao.validate(pat, null, null, null, ValidationModeEnum.UPDATE, null, mySrd);

		pat.setId("");

		try {
			myPatientDao.validate(pat, null, null, null, ValidationModeEnum.UPDATE, null, mySrd);
			fail();
		} catch (UnprocessableEntityException e) {
			assertThat(e.getMessage(), containsString("ID must be populated"));
		}

	}

	/**
	 * DELETE-mode validation: deleting an Organization still referenced by a Patient
	 * must be rejected with a conflict outcome; after the reference is cleared the
	 * same delete validates cleanly.
	 */
	@Test
	public void testValidateForDelete() {
		String methodName = "testValidateForDelete";

		Organization org = new Organization();
		org.setName(methodName);
		IIdType orgId = myOrganizationDao.create(org, mySrd).getId().toUnqualifiedVersionless();

		Patient pat = new Patient();
		pat.addName().addFamily(methodName);
		pat.getManagingOrganization().setReference(orgId);
		IIdType patId = myPatientDao.create(pat, mySrd).getId().toUnqualifiedVersionless();

		OperationOutcome outcome = null;
		try {
			myOrganizationDao.validate(null, orgId, null, null, ValidationModeEnum.DELETE, null, mySrd);
			fail();
		} catch (ResourceVersionConflictException e) {
			outcome = (OperationOutcome) e.getOperationOutcome();
		}

		String ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
		ourLog.info(ooString);
		assertThat(ooString, containsString("Unable to delete Organization"));

		// Remove the inbound reference, then the delete should validate as OK.
		pat.setId(patId);
		pat.getManagingOrganization().setReference("");
		myPatientDao.update(pat, mySrd);

		outcome = (OperationOutcome) myOrganizationDao.validate(null, orgId, null, null, ValidationModeEnum.DELETE, null, mySrd).getOperationOutcome();
		ooString = myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(outcome);
		ourLog.info(ooString);
		assertThat(ooString, containsString("Ok to delete"));

	}

	/**
	 * Finds the bundle entry whose resource ID part equals {@code name};
	 * fails the test (rather than returning null) when absent.
	 */
	private IResource findResourceByIdInBundle(Bundle vss, String name) {
		IResource retVal = null;
		for (Entry next : vss.getEntry()) {
			if (next.getResource().getId().getIdPart().equals(name)) {
				retVal = next.getResource();
				break;
			}
		}
		if (retVal == null) {
			fail("Can't find VS: " + name);
		}
		return retVal;
	}

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.gradle;

import org.elasticsearch.gradle.ElasticsearchDistribution.Flavor;
import org.elasticsearch.gradle.ElasticsearchDistribution.Platform;
import org.elasticsearch.gradle.ElasticsearchDistribution.Type;
import org.elasticsearch.gradle.docker.DockerSupportPlugin;
import org.elasticsearch.gradle.docker.DockerSupportService;
import org.elasticsearch.gradle.info.BuildParams;
import org.elasticsearch.gradle.info.GlobalBuildInfoPlugin;
import org.elasticsearch.gradle.util.GradleUtils;
import org.gradle.api.GradleException;
import org.gradle.api.NamedDomainObjectContainer;
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.UnknownTaskException;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.artifacts.ConfigurationContainer;
import org.gradle.api.artifacts.Dependency;
import org.gradle.api.artifacts.dsl.DependencyHandler;
import org.gradle.api.artifacts.repositories.IvyArtifactRepository;
import org.gradle.api.credentials.HttpHeaderCredentials;
import org.gradle.api.file.FileTree;
import org.gradle.api.plugins.ExtraPropertiesExtension;
import org.gradle.api.provider.Provider;
import org.gradle.api.tasks.Sync;
import org.gradle.api.tasks.TaskProvider;
import org.gradle.authentication.http.HttpHeaderAuthentication;

import java.io.File;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.function.Supplier;

import static org.elasticsearch.gradle.util.Util.capitalize;

/**
 * A plugin to manage getting and extracting distributions of Elasticsearch.
 *
 * The source of the distribution could be from a local snapshot, a locally built
 * bwc snapshot, or the Elastic downloads service.
 */
public class DistributionDownloadPlugin implements Plugin<Project> {

    private static final String CONTAINER_NAME = "elasticsearch_distributions";
    private static final String FAKE_IVY_GROUP = "elasticsearch-distribution";
    private static final String FAKE_SNAPSHOT_IVY_GROUP = "elasticsearch-distribution-snapshot";
    private static final String DOWNLOAD_REPO_NAME = "elasticsearch-downloads";
    private static final String SNAPSHOT_REPO_NAME = "elasticsearch-snapshots";

    // Populated only for internal builds (from the root project's extra properties);
    // null for external builds.
    private BwcVersions bwcVersions;
    private NamedDomainObjectContainer<ElasticsearchDistribution> distributionsContainer;

    /**
     * Registers the distributions container on the project and defers the wiring of
     * per-distribution dependencies until afterEvaluate, when all distributions have
     * been declared by build scripts.
     */
    @Override
    public void apply(Project project) {
        // this is needed for isInternal
        project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class);
        project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class);

        Provider<DockerSupportService> dockerSupport = GradleUtils.getBuildService(
            project.getGradle().getSharedServices(),
            DockerSupportPlugin.DOCKER_SUPPORT_SERVICE_NAME
        );

        // Each named distribution gets its own pair of configurations: one resolving
        // to the archive file, one to the extracted directory.
        distributionsContainer = project.container(ElasticsearchDistribution.class, name -> {
            Configuration fileConfiguration = project.getConfigurations().create("es_distro_file_" + name);
            Configuration extractedConfiguration = project.getConfigurations().create("es_distro_extracted_" + name);
            return new ElasticsearchDistribution(name, project.getObjects(), dockerSupport, fileConfiguration, extractedConfiguration);
        });
        project.getExtensions().add(CONTAINER_NAME, distributionsContainer);

        setupDownloadServiceRepo(project);

        if (BuildParams.isInternal()) {
            ExtraPropertiesExtension extraProperties = project.getExtensions().getExtraProperties();
            this.bwcVersions = (BwcVersions) extraProperties.get("bwcVersions");
        }

        project.afterEvaluate(this::setupDistributions);
    }

    /** Retrieves the distributions container previously registered by {@link #apply}. */
    @SuppressWarnings("unchecked")
    public static NamedDomainObjectContainer<ElasticsearchDistribution> getContainer(Project project) {
        return (NamedDomainObjectContainer<ElasticsearchDistribution>) project.getExtensions().getByName(CONTAINER_NAME);
    }

    // pkg private for tests
    void setupDistributions(Project project) {
        for (ElasticsearchDistribution distribution : distributionsContainer) {
            distribution.finalizeValues();

            DependencyHandler dependencies = project.getDependencies();
            // for the distribution as a file, just depend on the artifact directly
            dependencies.add(distribution.configuration.getName(), dependencyNotation(project, distribution));

            // no extraction allowed for rpm, deb or docker
            if (distribution.getType().shouldExtract()) {
                // for the distribution extracted, add a root level task that does the extraction, and depend on that
                // extracted configuration as an artifact consisting of the extracted distribution directory
                dependencies.add(
                    distribution.getExtracted().configuration.getName(),
                    projectDependency(project, ":", configName("extracted_elasticsearch", distribution))
                );
                // ensure a root level download task exists
                setupRootDownload(project.getRootProject(), distribution);
            }
        }
    }

    /**
     * Registers (once per distribution) the root-project download configuration and,
     * for archive types, a Sync task that unpacks the archive into the build dir and
     * publishes the extracted directory as an artifact.
     */
    private void setupRootDownload(Project rootProject, ElasticsearchDistribution distribution) {
        String extractTaskName = extractTaskName(distribution);
        // NOTE: this is *horrendous*, but seems to be the only way to check for the existence of a registered task
        try {
            rootProject.getTasks().named(extractTaskName);
            // already setup this version
            return;
        } catch (UnknownTaskException e) {
            // fall through: register the task
        }
        setupDownloadServiceRepo(rootProject);

        final ConfigurationContainer configurations = rootProject.getConfigurations();
        String downloadConfigName = configName("elasticsearch", distribution);
        String extractedConfigName = "extracted_" + downloadConfigName;
        final Configuration downloadConfig = configurations.create(downloadConfigName);
        configurations.create(extractedConfigName);
        rootProject.getDependencies().add(downloadConfigName, dependencyNotation(rootProject, distribution));

        // add task for extraction, delaying resolving config until runtime
        if (distribution.getType() == Type.ARCHIVE || distribution.getType() == Type.INTEG_TEST_ZIP) {
            Supplier<File> archiveGetter = downloadConfig::getSingleFile;
            String extractDir = rootProject.getBuildDir().toPath().resolve("elasticsearch-distros").resolve(extractedConfigName).toString();
            TaskProvider<Sync> extractTask = rootProject.getTasks().register(extractTaskName, Sync.class, syncTask -> {
                syncTask.dependsOn(downloadConfig);
                syncTask.into(extractDir);
                // Callable defers archive resolution (and the zip/tar decision) to execution time.
                syncTask.from((Callable<FileTree>) () -> {
                    File archiveFile = archiveGetter.get();
                    String archivePath = archiveFile.toString();
                    if (archivePath.endsWith(".zip")) {
                        return rootProject.zipTree(archiveFile);
                    } else if (archivePath.endsWith(".tar.gz")) {
                        return rootProject.tarTree(rootProject.getResources().gzip(archiveFile));
                    }
                    throw new IllegalStateException("unexpected file extension on [" + archivePath + "]");
                });
            });
            rootProject.getArtifacts()
                .add(
                    extractedConfigName,
                    rootProject.getLayout().getProjectDirectory().dir(extractDir),
                    artifact -> artifact.builtBy(extractTask)
                );
        }
    }

    /**
     * Adds an artifact-only ivy repository for the Elastic download service and scopes
     * it (via exclusive content) to the given fake dependency group.
     */
    private static void addIvyRepo(Project project, String name, String url, String group) {
        IvyArtifactRepository ivyRepo = project.getRepositories().ivy(repo -> {
            repo.setName(name);
            repo.setUrl(url);
            repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
            // this header is not a credential but we hack the capability to send this header to avoid polluting our download stats
            repo.credentials(HttpHeaderCredentials.class, creds -> {
                creds.setName("X-Elastic-No-KPI");
                creds.setValue("1");
            });
            repo.getAuthentication().create("header", HttpHeaderAuthentication.class);
            repo.patternLayout(layout -> layout.artifact("/downloads/elasticsearch/[module]-[revision](-[classifier]).[ext]"));
        });
        project.getRepositories().exclusiveContent(exclusiveContentRepository -> {
            exclusiveContentRepository.filter(config -> config.includeGroup(group));
            exclusiveContentRepository.forRepositories(ivyRepo);
        });
    }

    /** Idempotently registers the download (and, for external builds, snapshot) repos. */
    private static void setupDownloadServiceRepo(Project project) {
        if (project.getRepositories().findByName(DOWNLOAD_REPO_NAME) != null) {
            return;
        }
        addIvyRepo(project, DOWNLOAD_REPO_NAME, "https://artifacts.elastic.co", FAKE_IVY_GROUP);
        if (BuildParams.isInternal() == false) {
            // external, so add snapshot repo as well
            addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://snapshots.elastic.co", FAKE_SNAPSHOT_IVY_GROUP);
        }
    }

    /**
     * Returns a dependency object representing the given distribution.
     *
     * The returned object is suitable to be passed to {@link DependencyHandler}.
     * The concrete type of the object will either be a project {@link Dependency} or
     * a set of maven coordinates as a {@link String}. Project dependencies point to
     * a project in the Elasticsearch repo either under `:distribution:bwc`,
     * `:distribution:archives` or :distribution:packages`. Maven coordinates point to
     * either the integ-test-zip coordinates on maven central, or a set of artificial
     * coordinates that resolve to the Elastic download service through an ivy repository.
     */
    private Object dependencyNotation(Project project, ElasticsearchDistribution distribution) {
        if (BuildParams.isInternal()) {
            // non-external project, so depend on local build
            if (VersionProperties.getElasticsearch().equals(distribution.getVersion())) {
                return projectDependency(project, distributionProjectPath(distribution), "default");
            }
            BwcVersions.UnreleasedVersionInfo unreleasedInfo = bwcVersions.unreleasedInfo(Version.fromString(distribution.getVersion()));
            if (unreleasedInfo != null) {
                assert distribution.getBundledJdk();
                return projectDependency(project, unreleasedInfo.gradleProjectPath, distributionProjectName(distribution));
            }
        }

        if (distribution.getType() == Type.INTEG_TEST_ZIP) {
            return "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + distribution.getVersion() + "@zip";
        }

        // Build the artificial ivy coordinates: extension, classifier and flavor all
        // depend on distribution type, platform and version.
        Version distroVersion = Version.fromString(distribution.getVersion());
        String extension = distribution.getType().toString();
        String classifier = ":x86_64";
        if (distribution.getType() == Type.ARCHIVE) {
            extension = distribution.getPlatform() == Platform.WINDOWS ? "zip" : "tar.gz";
            // platform-qualified classifiers only exist from 7.0.0 onward
            if (distroVersion.onOrAfter("7.0.0")) {
                classifier = ":" + distribution.getPlatform() + "-x86_64";
            } else {
                classifier = "";
            }
        } else if (distribution.getType() == Type.DEB) {
            classifier = ":amd64";
        }
        String flavor = "";
        if (distribution.getFlavor() == Flavor.OSS && distroVersion.onOrAfter("6.3.0")) {
            flavor = "-oss";
        }

        String group = distribution.getVersion().endsWith("-SNAPSHOT") ? FAKE_SNAPSHOT_IVY_GROUP : FAKE_IVY_GROUP;
        return group + ":elasticsearch" + flavor + ":" + distribution.getVersion() + classifier + "@" + extension;
    }

    /** Builds a project dependency, failing fast with the known project names when the path is bad. */
    private static Dependency projectDependency(Project project, String projectPath, String projectConfig) {
        if (project.findProject(projectPath) == null) {
            throw new GradleException("no project [" + projectPath + "], project names: " + project.getRootProject().getAllprojects());
        }
        Map<String, Object> depConfig = new HashMap<>();
        depConfig.put("path", projectPath);
        depConfig.put("configuration", projectConfig);
        return project.getDependencies().project(depConfig);
    }

    /** Maps a distribution to the gradle project path that builds it. */
    private static String distributionProjectPath(ElasticsearchDistribution distribution) {
        String projectPath = ":distribution";
        switch (distribution.getType()) {
            case INTEG_TEST_ZIP:
                projectPath += ":archives:integ-test-zip";
                break;

            case DOCKER:
                projectPath += ":docker:";
                projectPath += distributionProjectName(distribution);
                break;

            default:
                projectPath += distribution.getType() == Type.ARCHIVE ? ":archives:" : ":packages:";
                projectPath += distributionProjectName(distribution);
                break;
        }
        return projectPath;
    }

    /**
     * Works out the gradle project name that provides a distribution artifact.
     *
     * @param distribution the distribution from which to derive a project name
     * @return the name of a project. It is not the full project path, only the name.
     */
    private static String distributionProjectName(ElasticsearchDistribution distribution) {
        Platform platform = distribution.getPlatform();
        Architecture architecture = distribution.getArchitecture();
        String projectName = "";

        // x64 (and all Windows) projects carry no architecture suffix.
        final String archString = platform == Platform.WINDOWS || architecture == Architecture.X64
            ? ""
            : "-" + architecture.toString().toLowerCase();

        if (distribution.getFlavor() == Flavor.OSS) {
            projectName += "oss-";
        }

        if (distribution.getBundledJdk() == false) {
            projectName += "no-jdk-";
        }

        switch (distribution.getType()) {
            case ARCHIVE:
                projectName += platform.toString() + archString + (platform == Platform.WINDOWS ? "-zip" : "-tar");
                break;

            case DOCKER:
                projectName += "docker" + archString + "-export";
                break;

            default:
                projectName += distribution.getType();
                break;
        }

        return projectName;
    }

    /** Unique configuration name encoding version, type, platform, flavor and jdk-bundling. */
    private static String configName(String prefix, ElasticsearchDistribution distribution) {
        return String.format(
            Locale.ROOT,
            "%s_%s_%s_%s%s%s",
            prefix,
            distribution.getVersion(),
            distribution.getType(),
            distribution.getPlatform() == null ? "" : distribution.getPlatform() + "_",
            distribution.getFlavor(),
            distribution.getBundledJdk() ? "" : "_nojdk"
        );
    }

    /** Unique extract-task name mirroring the attributes encoded by {@link #configName}. */
    private static String extractTaskName(ElasticsearchDistribution distribution) {
        String taskName = "extractElasticsearch";
        if (distribution.getType() != Type.INTEG_TEST_ZIP) {
            if (distribution.getFlavor() == Flavor.OSS) {
                taskName += "Oss";
            }
            if (distribution.getBundledJdk() == false) {
                taskName += "NoJdk";
            }
        }
        if (distribution.getType() == Type.ARCHIVE) {
            taskName += capitalize(distribution.getPlatform().toString());
        } else if (distribution.getType() != Type.INTEG_TEST_ZIP) {
            taskName += capitalize(distribution.getType().toString());
        }
        taskName += distribution.getVersion();
        return taskName;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package freemarker.template.utility;

import java.math.BigDecimal;
import java.math.BigInteger;

import org.junit.Test;

import junit.framework.TestCase;

/**
 * Unit tests for {@code NumberUtil}: signum extraction across the boxed
 * numeric types, integer-valuedness of {@link BigDecimal}s, and exact
 * narrowing of arbitrary {@code Number}s to {@code int}.
 *
 * <p>NOTE(review): the class extends the JUnit 3 {@link TestCase} but also
 * carries JUnit 4 {@code @Test} annotations; under a JUnit 3 runner the
 * annotations are ignored and methods are discovered by their {@code test*}
 * names — confirm which runner the build actually uses.
 */
public class NumberUtilTest extends TestCase {

    /**
     * getSignum must return -1/0/1 for every supported boxed type and must
     * reject NaN with an ArithmeticException (NaN has no sign).
     */
    @Test
    public void testGetSignum() {
        // Double: infinities carry a sign; NaN must throw.
        assertEquals(1, NumberUtil.getSignum(Double.valueOf(Double.POSITIVE_INFINITY)));
        assertEquals(1, NumberUtil.getSignum(Double.valueOf(3)));
        assertEquals(0, NumberUtil.getSignum(Double.valueOf(0)));
        assertEquals(-1, NumberUtil.getSignum(Double.valueOf(-3)));
        assertEquals(-1, NumberUtil.getSignum(Double.valueOf(Double.NEGATIVE_INFINITY)));
        try {
            NumberUtil.getSignum(Double.valueOf(Double.NaN));
            fail();
        } catch (ArithmeticException e) {
            // expected
        }

        // Float: same contract as Double.
        assertEquals(1, NumberUtil.getSignum(Float.valueOf(Float.POSITIVE_INFINITY)));
        assertEquals(1, NumberUtil.getSignum(Float.valueOf(3)));
        assertEquals(0, NumberUtil.getSignum(Float.valueOf(0)));
        assertEquals(-1, NumberUtil.getSignum(Float.valueOf(-3)));
        assertEquals(-1, NumberUtil.getSignum(Float.valueOf(Float.NEGATIVE_INFINITY)));
        try {
            NumberUtil.getSignum(Float.valueOf(Float.NaN));
            fail();
        } catch (ArithmeticException e) {
            // expected
        }

        // Integral boxed types.
        assertEquals(1, NumberUtil.getSignum(Long.valueOf(3)));
        assertEquals(0, NumberUtil.getSignum(Long.valueOf(0)));
        assertEquals(-1, NumberUtil.getSignum(Long.valueOf(-3)));

        assertEquals(1, NumberUtil.getSignum(Integer.valueOf(3)));
        assertEquals(0, NumberUtil.getSignum(Integer.valueOf(0)));
        assertEquals(-1, NumberUtil.getSignum(Integer.valueOf(-3)));

        assertEquals(1, NumberUtil.getSignum(Short.valueOf((short) 3)));
        assertEquals(0, NumberUtil.getSignum(Short.valueOf((short) 0)));
        assertEquals(-1, NumberUtil.getSignum(Short.valueOf((short) -3)));

        assertEquals(1, NumberUtil.getSignum(Byte.valueOf((byte) 3)));
        assertEquals(0, NumberUtil.getSignum(Byte.valueOf((byte) 0)));
        assertEquals(-1, NumberUtil.getSignum(Byte.valueOf((byte) -3)));

        // Arbitrary-precision types.
        assertEquals(1, NumberUtil.getSignum(BigDecimal.valueOf(3)));
        assertEquals(0, NumberUtil.getSignum(BigDecimal.valueOf(0)));
        assertEquals(-1, NumberUtil.getSignum(BigDecimal.valueOf(-3)));

        assertEquals(1, NumberUtil.getSignum(BigInteger.valueOf(3)));
        assertEquals(0, NumberUtil.getSignum(BigInteger.valueOf(0)));
        assertEquals(-1, NumberUtil.getSignum(BigInteger.valueOf(-3)));
    }

    /**
     * isIntegerBigDecimal is cross-checked against the oracle
     * {@code n.doubleValue() == n.longValue()}: a BigDecimal is integer-valued
     * exactly when its double and truncated-long views agree (all fixture
     * values are small enough for both views to be exact).
     */
    @Test
    public void testIsBigDecimalInteger() {
        // Guard the fixtures themselves: precision/scale must be what the
        // test intends, otherwise the oracle below would be vacuous.
        BigDecimal n1 = new BigDecimal("1.125");
        if (n1.precision() != 4 || n1.scale() != 3) {
            throw new RuntimeException("Wrong: " + n1);
        }
        // Non-integer value produced by arithmetic (keeps scale 3).
        BigDecimal n2 = new BigDecimal("1.125").subtract(new BigDecimal("0.005"));
        if (n2.precision() != 4 || n2.scale() != 3) {
            throw new RuntimeException("Wrong: " + n2);
        }
        BigDecimal n3 = new BigDecimal("123");
        BigDecimal n4 = new BigDecimal("6000");
        // Integer value (1.0) that still carries a positive scale of 5.
        BigDecimal n5 = new BigDecimal("1.12345").subtract(new BigDecimal("0.12345"));
        if (n5.precision() != 6 || n5.scale() != 5) {
            throw new RuntimeException("Wrong: " + n5);
        }
        BigDecimal n6 = new BigDecimal("0");
        // Zero with a nonzero scale.
        BigDecimal n7 = new BigDecimal("0.001").subtract(new BigDecimal("0.001"));
        BigDecimal n8 = new BigDecimal("60000.5").subtract(new BigDecimal("0.5"));
        // 6000 represented with a negative scale (unscaled 6 * 10^3).
        BigDecimal n9 = new BigDecimal("6").movePointRight(3).setScale(-3);
        // Each fixture and its negation must classify identically.
        BigDecimal[] ns = new BigDecimal[] {
            n1, n2, n3, n4, n5, n6, n7, n8, n9,
            n1.negate(), n2.negate(), n3.negate(), n4.negate(), n5.negate(),
            n6.negate(), n7.negate(), n8.negate(), n9.negate(),
        };
        for (BigDecimal n : ns) {
            assertEquals(n.doubleValue() == n.longValue(), NumberUtil.isIntegerBigDecimal(n));
        }
    }

    /**
     * toIntExact must round-trip any in-range integral value from every
     * Number type, and throw ArithmeticException on fractional values or
     * values outside the int range.
     *
     * <p>NOTE(review): method name has a typo ("Excact"); harmless under
     * {@code test*}-name discovery, but worth fixing in a dedicated rename.
     */
    @Test
    public void testToIntExcact() {
        for (int n : new int[] { Integer.MIN_VALUE, Byte.MIN_VALUE, -1, 0, 1, Byte.MAX_VALUE, Integer.MAX_VALUE }) {
            if (n != Integer.MIN_VALUE && n != Integer.MAX_VALUE) {
                // These narrower/lossy types cannot represent the int extremes.
                assertEquals(n, NumberUtil.toIntExact(Byte.valueOf((byte) n)));
                assertEquals(n, NumberUtil.toIntExact(Short.valueOf((short) n)));
                assertEquals(n, NumberUtil.toIntExact(Float.valueOf(n)));
            }
            assertEquals(n, NumberUtil.toIntExact(Integer.valueOf(n)));
            assertEquals(n, NumberUtil.toIntExact(Long.valueOf(n)));
            assertEquals(n, NumberUtil.toIntExact(Double.valueOf(n)));
            assertEquals(n, NumberUtil.toIntExact(BigDecimal.valueOf(n)));
            // Integer value carried with a nonzero scale must still convert.
            assertEquals(n, NumberUtil.toIntExact(BigDecimal.valueOf(n * 10L).divide(BigDecimal.TEN)));
            assertEquals(n, NumberUtil.toIntExact(BigInteger.valueOf(n)));
        }

        // Long one past each int bound overflows.
        try {
            NumberUtil.toIntExact(Long.valueOf(Integer.MIN_VALUE - 1L));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        try {
            NumberUtil.toIntExact(Long.valueOf(Integer.MAX_VALUE + 1L));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        // Fractional float cannot convert exactly.
        try {
            NumberUtil.toIntExact(Float.valueOf(1.00001f));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        try {
            NumberUtil.toIntExact(Float.valueOf(Integer.MIN_VALUE - 129L));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        // (float) Integer.MAX_VALUE rounds up to 2^31, which no longer
        // fits an int — so even "MAX_VALUE" must throw from a Float.
        try {
            NumberUtil.toIntExact(Float.valueOf(Integer.MAX_VALUE));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        // Fractional and out-of-range doubles.
        try {
            NumberUtil.toIntExact(Double.valueOf(1.00001));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        try {
            NumberUtil.toIntExact(Double.valueOf(Integer.MIN_VALUE - 1L));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        try {
            NumberUtil.toIntExact(Double.valueOf(Integer.MAX_VALUE + 1L));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        // Fractional and out-of-range BigDecimal / BigInteger.
        try {
            NumberUtil.toIntExact(new BigDecimal("100.000001"));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        try {
            NumberUtil.toIntExact(BigDecimal.valueOf(Integer.MIN_VALUE - 1L));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        try {
            NumberUtil.toIntExact(BigDecimal.valueOf(Integer.MAX_VALUE + 1L));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        try {
            NumberUtil.toIntExact(BigInteger.valueOf(Integer.MIN_VALUE - 1L));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
        try {
            NumberUtil.toIntExact(BigInteger.valueOf(Integer.MAX_VALUE + 1L));
            fail();
        } catch (ArithmeticException e) {
            // Expected
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.mysql.implementation;

import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.mysql.fluent.ServerBasedPerformanceTiersClient;
import com.azure.resourcemanager.mysql.fluent.models.PerformanceTierPropertiesInner;
import com.azure.resourcemanager.mysql.models.PerformanceTierListResult;
import reactor.core.publisher.Mono;

/** An instance of this class provides access to all the operations defined in ServerBasedPerformanceTiersClient. */
// NOTE(review): AutoRest-generated file — do not hand-edit logic; regenerate instead.
public final class ServerBasedPerformanceTiersClientImpl implements ServerBasedPerformanceTiersClient {
    private final ClientLogger logger = new ClientLogger(ServerBasedPerformanceTiersClientImpl.class);

    /** The proxy service used to perform REST calls. */
    private final ServerBasedPerformanceTiersService service;

    /** The service client containing this operation class. */
    private final MySqlManagementClientImpl client;

    /**
     * Initializes an instance of ServerBasedPerformanceTiersClientImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    ServerBasedPerformanceTiersClientImpl(MySqlManagementClientImpl client) {
        this.service =
            RestProxy
                .create(
                    ServerBasedPerformanceTiersService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }

    /**
     * The interface defining all the services for MySqlManagementClientServerBasedPerformanceTiers to be used by the
     * proxy service to perform REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "MySqlManagementClien")
    private interface ServerBasedPerformanceTiersService {
        @Headers({"Content-Type: application/json"})
        @Get(
            "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers"
                + "/{serverName}/performanceTiers")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ManagementException.class)
        Mono<Response<PerformanceTierListResult>> list(
            @HostParam("$host") String endpoint,
            @QueryParam("api-version") String apiVersion,
            @PathParam("subscriptionId") String subscriptionId,
            @PathParam("resourceGroupName") String resourceGroupName,
            @PathParam("serverName") String serverName,
            @HeaderParam("Accept") String accept,
            Context context);
    }

    /**
     * List all the performance tiers for a MySQL server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of performance tiers along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<PerformanceTierPropertiesInner>> listSinglePageAsync(
        String resourceGroupName, String serverName) {
        // Client-side validation: fail fast with Mono.error rather than throwing,
        // so subscribers see the error through the reactive chain.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (serverName == null) {
            return Mono.error(new IllegalArgumentException("Parameter serverName is required and cannot be null."));
        }
        final String apiVersion = "2017-12-01";
        final String accept = "application/json";
        return FluxUtil
            .withContext(
                context ->
                    service
                        .list(
                            this.client.getEndpoint(),
                            apiVersion,
                            this.client.getSubscriptionId(),
                            resourceGroupName,
                            serverName,
                            accept,
                            context))
            // The service returns the whole list in one response: the
            // continuation token and deserialized headers are both null.
            .<PagedResponse<PerformanceTierPropertiesInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * List all the performance tiers for a MySQL server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of performance tiers along with {@link PagedResponse} on successful completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<PerformanceTierPropertiesInner>> listSinglePageAsync(
        String resourceGroupName, String serverName, Context context) {
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        if (resourceGroupName == null) {
            return Mono
                .error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
        }
        if (serverName == null) {
            return Mono.error(new IllegalArgumentException("Parameter serverName is required and cannot be null."));
        }
        final String apiVersion = "2017-12-01";
        final String accept = "application/json";
        // Caller-supplied context is merged with the client's default context.
        context = this.client.mergeContext(context);
        return service
            .list(
                this.client.getEndpoint(),
                apiVersion,
                this.client.getSubscriptionId(),
                resourceGroupName,
                serverName,
                accept,
                context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), null, null));
    }

    /**
     * List all the performance tiers for a MySQL server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of performance tiers.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<PerformanceTierPropertiesInner> listAsync(String resourceGroupName, String serverName) {
        // Single-page flux: no next-page retriever is supplied.
        return new PagedFlux<>(() -> listSinglePageAsync(resourceGroupName, serverName));
    }

    /**
     * List all the performance tiers for a MySQL server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of performance tiers.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<PerformanceTierPropertiesInner> listAsync(
        String resourceGroupName, String serverName, Context context) {
        return new PagedFlux<>(() -> listSinglePageAsync(resourceGroupName, serverName, context));
    }

    /**
     * List all the performance tiers for a MySQL server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of performance tiers.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<PerformanceTierPropertiesInner> list(String resourceGroupName, String serverName) {
        return new PagedIterable<>(listAsync(resourceGroupName, serverName));
    }

    /**
     * List all the performance tiers for a MySQL server.
     *
     * @param resourceGroupName The name of the resource group. The name is case insensitive.
     * @param serverName The name of the server.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return a list of performance tiers.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<PerformanceTierPropertiesInner> list(
        String resourceGroupName, String serverName, Context context) {
        return new PagedIterable<>(listAsync(resourceGroupName, serverName, context));
    }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package canaryeds.builder;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import static java.lang.Math.*;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.SimpleTimeZone;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

/**
 * Analyzes a delimited data file (CSV-style) to infer per-column types,
 * value ranges, and timestep information, and builds from that analysis a
 * CANARY v4 configuration map ({@link #getV4Config()}).
 *
 * <p>NOTE(review): this class uses raw collection types ({@code new ArrayList()},
 * {@code new HashMap()}) throughout; consider parameterizing in a follow-up.
 *
 * @author dbhart
 */
public class Analyzer {

    private static final Logger LOG = Logger.getLogger(Analyzer.class.getName());
    private ArrayList<String> tags = null;
    // Column indices; -1 means "unset".
    private int stepColumn = -1;
    private int tagColumn = -1;
    private int valueColumn = -1;
    private int qualityColumn = -1;
    private FileFormat dataFormat = FileFormat.SPREADSHEET;
    // SimpleDateFormat pattern used to parse the step (timestamp) column.
    private String stepFormat = "MM/dd/yyyy HH:mm:ss";
    private char fieldSep = ',';
    private File dataFile = null;
    // Raw lines of the loaded data file, populated by setDataFile().
    private ArrayList<String> dataLines = new ArrayList();
    private int numHeaderLines = 0;
    // Timing extrema/interval in epoch milliseconds, computed by analyze().
    private long stepMin = Long.MAX_VALUE;
    private long stepMax = Long.MIN_VALUE;
    private long stepDel = Long.MAX_VALUE;
    private String stepFieldName = null;
    private ArrayList<SimpleChannelDefn> channelDefs = null;

    /**
     * Lightweight per-channel summary: field name, inferred type, and
     * min/max/smallest-nonzero-delta of its values.
     */
    public class SimpleChannelDefn {

        @Override
        public String toString() {
            return valType + " Field \'" + name + "\': [" + minValue + ", " + delValue + ", " + maxValue + "]";
        }

        public SimpleChannelDefn(String name, FieldFormat valType, double minValue,
                double maxValue, double delValue) {
            this.name = name;
            this.valType = valType;
            this.minValue = minValue;
            this.maxValue = maxValue;
            this.delValue = delValue;
        }
        public String name;
        public FieldFormat valType;
        public double minValue;
        public double maxValue;
        public double delValue;
    }

    /**
     * Get the value of numHeaderLines
     *
     * @return the value of numHeaderLines
     */
    public int getNumHeaderLines() {
        return numHeaderLines;
    }

    /**
     * Set the value of numHeaderLines
     *
     * @param numHeaderLines new value of numHeaderLines
     */
    public void setNumHeaderLines(int numHeaderLines) {
        if (this.numHeaderLines != numHeaderLines) {
            this.numHeaderLines = numHeaderLines;
            LOG.info("Set number of header lines (before column titles): " + numHeaderLines);
        }
    }

    public int getTagColumn() {
        return tagColumn;
    }

    // Setter logs transitions between "set" and "unset" (-1) states.
    public void setTagColumn(int tagColumn) {
        if (this.tagColumn != tagColumn) {
            if (tagColumn < 0 && this.tagColumn >= 0) {
                LOG.info("Tag column unset");
            } else if (tagColumn >= 0) {
                LOG.info("Tag contained in column: " + tagColumn);
            }
            this.tagColumn = tagColumn;
        }
    }

    public int getValueColumn() {
        return valueColumn;
    }

    public void setValueColumn(int valueColumn) {
        if (this.valueColumn != valueColumn) {
            if (valueColumn < 0 && this.valueColumn >= 0) {
                LOG.info("Value column unset");
            } else if (valueColumn >= 0) {
                LOG.info("Value contained in column: " + valueColumn);
            }
            this.valueColumn = valueColumn;
        }
    }

    public int getQualityColumn() {
        return qualityColumn;
    }

    public void setQualityColumn(int qualityColumn) {
        if (this.qualityColumn != qualityColumn) {
            if (qualityColumn < 0 && this.qualityColumn >= 0) {
                LOG.info("Quality column unset");
            } else if (qualityColumn >= 0) {
                LOG.info("Quality contained in column: " + qualityColumn);
            }
            this.qualityColumn = qualityColumn;
        }
    }

    /**
     * Get the value of dataFile
     *
     * @return the value of dataFile
     */
    public File getDataFile() {
        return dataFile;
    }

    /**
     * Set the value of dataFile
     *
     * Reads the whole file into {@link #dataLines} eagerly; any previously
     * loaded lines are discarded.
     *
     * NOTE(review): the reader is not closed on all failure paths (no
     * finally / try-with-resources), and the outer catch swallows into a log
     * entry only — consider try-with-resources here.
     *
     * @param dataFile new value of dataFile
     */
    public void setDataFile(File dataFile) {
        try {
            this.dataFile = dataFile;
            this.dataLines = new ArrayList();
            LOG.info("Set data file to: " + dataFile.getName());
            FileReader inFile;
            BufferedReader reader;
            try {
                inFile = new FileReader(dataFile);
            } catch (FileNotFoundException ex) {
                LOG.log(Level.SEVERE, "File not found (that's weird ...)", ex);
                return;
            }
            reader = new BufferedReader(inFile);
            String line = "";
            // Read to EOF; an IOException mid-read aborts the loop by
            // nulling `line` after logging.
            while (line != null) {
                try {
                    line = reader.readLine();
                    if (line != null) {
                        this.dataLines.add(line);
                    }
                } catch (IOException ex) {
                    LOG.log(Level.SEVERE, "Error reading lines from file: " + dataFile.getName(), ex);
                    line = null;
                }
            }
            LOG.info("Read " + this.dataLines.size() + " lines from file: " + dataFile.getName());
            reader.close();
            inFile.close();
        } catch (Exception ex) {
            LOG.log(Level.SEVERE, "Failed to open file for analysis: " + dataFile.getName(), ex);
        }
    }

    /**
     * Split line number {@code line} (0-based into dataLines) on the field
     * separator. Returns null when no file has been loaded.
     *
     * NOTE(review): the separator is passed to String.split, which treats it
     * as a regex — a separator such as '|' or '.' would need quoting.
     */
    public String[] getFields(int line) {
        if (this.dataLines.isEmpty()) {
            return null;
        }
        String sep = new String() + this.getFieldSep();
        return this.dataLines.get(line).split(sep);
    }

    /**
     * Get the value of fieldSep
     *
     * @return the value of fieldSep
     */
    public char getFieldSep() {
        return fieldSep;
    }

    /**
     * Set the value of fieldSep
     *
     * @param fieldSep new value of fieldSep
     */
    public void setFieldSep(char fieldSep) {
        this.fieldSep = fieldSep;
        LOG.info("Field separator set to: " + fieldSep);
    }

    /**
     * Get the value of stepFormat
     *
     * @return the value of stepFormat
     */
    public String getStepFormat() {
        return stepFormat;
    }

    /**
     * Set the value of stepFormat
     *
     * @param stepFormat new value of stepFormat (SimpleDateFormat pattern)
     */
    public void setStepFormat(String stepFormat) {
        this.stepFormat = stepFormat;
        LOG.info("Step format set to: " + stepFormat);
    }

    /**
     * Get the value of dataFormat
     *
     * @return the value of dataFormat
     */
    public FileFormat getDataFormat() {
        return dataFormat;
    }

    /**
     * Set the value of dataFormat
     *
     * @param dataFormat new value of dataFormat
     */
    public void setDataFormat(FileFormat dataFormat) {
        if (this.dataFormat != dataFormat) {
            this.dataFormat = dataFormat;
            LOG.info("Data format set to: " + dataFormat.toString());
        }
    }

    /**
     * Get the value of stepColumn
     *
     * @return the value of stepColumn
     */
    public int getStepColumn() {
        return stepColumn;
    }

    /**
     * Set the value of stepColumn
     *
     * Unlike the other column setters, negative values are silently ignored
     * (the step column cannot be unset once set).
     *
     * @param stepColumn new value of stepColumn
     */
    public void setStepColumn(int stepColumn) {
        if (this.stepColumn != stepColumn && stepColumn >= 0) {
            this.stepColumn = stepColumn;
            LOG.info("Step contained in column: " + stepColumn);
        }
    }

    /**
     * Get the value of tags
     *
     * @return the value of tags
     */
    public ArrayList<String> getTags() {
        return tags;
    }

    /**
     * Set the value of tags
     *
     * @param tags new value of tags
     */
    public void setTags(ArrayList<String> tags) {
        this.tags = tags;
    }

    // Routes this class's log records to a GUI text area.
    void setLogHandler(TextAreaHandler handler) {
        LOG.addHandler(handler);
    }

    /**
     * The dataFormat of the data file to build from.
     */
    public enum FileFormat {

        /**
         * One column per tag, one row per step.
         */
        SPREADSHEET,
        /**
         * One row per tag per step.
         */
        TABLE,
    }

    // Inferred per-column value type; UNKNOWN is resolved to STRING at the end.
    public enum FieldFormat {

        UNKNOWN, DATE, INTEGER, DOUBLE, STRING,
    }

    /**
     * Analyze the file to set up the configuration options.
     *
     * Infers each column's FieldFormat, collects numeric values per tag,
     * derives timing (stepMin/stepMax/stepDel), and fills channelDefs.
     *
     * NOTE(review): only FileFormat.SPREADSHEET is implemented — for TABLE
     * the switch falls through and analyze() returns true having done
     * nothing.
     *
     * @return true on success, false when the header or a timestamp fails
     *         validation/parsing.
     */
    public boolean analyze() {
        // TODO code application logic here
        // Request CSV file to process
        // Ask what dataFormat (wide sheet or long table)
        // Ask about header rows (or assume there are 0 header rows before the titles)
        // Ask about title row (or assume the first row)
        // Read the title row, creating a HashMap of blank ArrayList based on tag
        channelDefs = new ArrayList();
        HashMap<String, ArrayList<Double>> myValues = new HashMap();
        ArrayList<Date> mySteps = new ArrayList();
        SimpleDateFormat myStepFormat = new SimpleDateFormat(stepFormat);
        FieldFormat[] myFieldFormats = new FieldFormat[0];
        // Header (title) row — both fieldNames and fieldValues initially
        // read the same line; fieldValues is reused for data rows below.
        String[] fieldNames = getFields(numHeaderLines);
        switch (dataFormat) {
            case SPREADSHEET:
                String[] fieldValues = getFields(numHeaderLines);
                if (fieldValues.length < 2) {
                    LOG.severe("Your list of tags seems too short. Did you select the right file/format? Aborting ...");
                    return false;
                }
                // NOTE(review): off-by-one — this admits stepColumn ==
                // fieldValues.length and then indexes out of bounds below;
                // the check should likely be `stepColumn >= fieldValues.length`.
                if (fieldValues.length < stepColumn || stepColumn < 0) {
                    LOG.severe("You selected a column that does not exist for your Step field. Aborting ...");
                    return false;
                }
                String stepField = fieldValues[stepColumn];
                stepFieldName = stepField;
                // One value list per non-step column, keyed by header title.
                for (int iTag = 0; iTag < fieldValues.length; iTag++) {
                    if (iTag == stepColumn) {
                        continue;
                    }
                    myValues.put(fieldValues[iTag], new ArrayList<Double>());
                }
                myFieldFormats = new FieldFormat[fieldValues.length];
                for (int iField = 0; iField < fieldValues.length; iField++) {
                    if (iField == stepColumn) {
                        myFieldFormats[iField] = FieldFormat.DATE;
                    } else {
                        myFieldFormats[iField] = FieldFormat.UNKNOWN;
                    }
                }
                // Data rows start immediately after the title row.
                for (int iLine = numHeaderLines + 1; iLine < dataLines.size(); iLine++) {
                    fieldValues = getFields(iLine);
                    String stepVal = fieldValues[stepColumn];
                    try {
                        Date stepDate = myStepFormat.parse(stepVal);
                        mySteps.add(stepDate);
                    } catch (ParseException ex) {
                        LOG.log(Level.SEVERE, "Error parsing Step as Date on line " + (iLine + 1) + ": " + stepVal, ex);
                        return false;
                    }
                    for (int jTag = 0; jTag < fieldValues.length; jTag++) {
                        if (jTag == stepColumn) {
                            continue;
                        }
                        String value;
                        FieldFormat curFormat;
                        value = fieldValues[jTag].toLowerCase();
                        // Skip missing-data markers.
                        // NOTE(review): contains("na") also matches any value
                        // merely containing the letters "na" (and subsumes the
                        // "nan" test) — an equals()-based check would be safer.
                        if (value.contains("nan") || value.contains("#n/a") || value.contains("null")
                                || value.contains("none") || value.contains("na")) {
                            continue;
                        }
                        curFormat = myFieldFormats[jTag];
                        Object curValue = null; // NOTE(review): unused local.
                        if (value.length() > 0) {
                            // Type inference: try int, then double, else
                            // heuristically classify as STRING. DOUBLE is
                            // sticky (an int seen later cannot demote it).
                            try {
                                Integer testInt = Integer.parseInt(value);
                                if (curFormat != FieldFormat.DOUBLE) {
                                    myFieldFormats[jTag] = FieldFormat.INTEGER;
                                }
                                myValues.get(fieldNames[jTag]).add(testInt.doubleValue());
                            } catch (NumberFormatException ex) {
                                try {
                                    Double testDouble = Double.parseDouble(value);
                                    myFieldFormats[jTag] = FieldFormat.DOUBLE;
                                    myValues.get(fieldNames[jTag]).add(testDouble.doubleValue());
                                } catch (NumberFormatException ex2) {
                                    if (curFormat == FieldFormat.UNKNOWN) {
                                        // Heuristic: presence of letters/quotes
                                        // (beyond what a number could contain)
                                        // marks the column as STRING.
                                        if (value.contains("a") || value.contains("\"") || value.contains("\'")
                                                || value.contains("i") || value.contains("o")
                                                || value.contains("u")) {
                                            myFieldFormats[jTag] = FieldFormat.STRING;
                                            LOG.info("Field " + jTag + " is a STRING field based on the contents: ''" + value + "''");
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                // Ask for the Step type and dataFormat (or interpret, from the current locale)
                // Create a HashMap of <String tag, ArrayList<String> values>
                // Read all values from the file
        }
        for (int iFld = 0; iFld < myFieldFormats.length; iFld++) {
            FieldFormat curFormat = myFieldFormats[iFld];
            if (curFormat == FieldFormat.UNKNOWN) {
                curFormat = FieldFormat.STRING;
            }
            //LOG.info("Field " + iFld + " is a " + myFieldFormats[iFld] + " field.");
            // Do analysis of values
            if (curFormat == FieldFormat.DATE) {
                // Timing: first/last timestamp plus smallest nonzero gap
                // between consecutive steps (assumes steps are in file order).
                long firstStep;
                long lastStep;
                long prevStep;
                long thisStep;
                long delta = Long.MAX_VALUE;
                firstStep = mySteps.get(0).getTime();
                thisStep = firstStep;
                lastStep = firstStep;
                for (Iterator it = mySteps.iterator(); it.hasNext();) {
                    prevStep = thisStep;
                    Date nextStep = (Date) it.next();
                    thisStep = nextStep.getTime();
                    lastStep = thisStep;
                    if ((abs(thisStep - prevStep) > 0)) {
                        delta = min(abs(thisStep - prevStep), delta);
                    }
                }
                stepMin = firstStep;
                stepMax = lastStep;
                stepDel = delta;
                // Logged only — timing is kept in stepMin/stepMax/stepDel,
                // not added to channelDefs.
                SimpleChannelDefn stepChannel = new SimpleChannelDefn(fieldNames[iFld],
                        FieldFormat.DATE, firstStep, lastStep, delta);
                LOG.info("Timing definition: " + stepChannel.toString());
            } else if (curFormat == FieldFormat.DOUBLE || curFormat == FieldFormat.INTEGER) {
                double minValue;
                double maxValue;
                double epsValue;
                double lastVal;
                double resVal;
                DescriptiveStatistics rawValues = new DescriptiveStatistics();
                DescriptiveStatistics residuals = new DescriptiveStatistics();
                ArrayList<Double> values = myValues.get(fieldNames[iFld]);
                try {
                    // NOTE(review): lastVal is never updated inside the loop,
                    // so "residuals" are differences from the FIRST value,
                    // not successive differences — confirm intent.
                    lastVal = values.get(0);
                    for (Double val : values) {
                        resVal = abs(val - lastVal);
                        rawValues.addValue(val.doubleValue());
                        if (resVal > 0) {
                            residuals.addValue(resVal);
                        }
                    }
                } catch (Exception E) {
                    // NOTE(review): silently swallows (e.g. empty value list);
                    // at minimum this deserves a log entry.
                }
                minValue = rawValues.getMin();
                maxValue = rawValues.getMax();
                epsValue = residuals.getMin();
                SimpleChannelDefn myChannel = new SimpleChannelDefn(fieldNames[iFld],
                        myFieldFormats[iFld], minValue, maxValue, epsValue);
                LOG.info("DataChannel definition: " + myChannel.toString());
                channelDefs.add(myChannel);
            } else {
                LOG.warning("Skipping tag \'" + fieldNames[iFld] + "\' because it has no numeric data or is string data");
            }
        }
        // Process each ArrayList of values, determining the following things:
        // aside from NA, NaN, #N/A, null, or other non-number strings, is this a string field
        // if this is a string field, can we get a set of valid strings?
        // is this an integer or a double valued field (look for exponential dataFormat, decimal points)
        // smallest non-zero difference (sigma_0)
        // minimum and maximum values (vr_min, vr_max)
        return true;
    }

    /**
     * Build a CANARY v4 configuration map from the results of a prior
     * {@link #analyze()} call. Must be called after analyze(); relies on
     * channelDefs, stepMin/stepMax/stepDel, stepFieldName and dataFile.
     *
     * @return nested HashMap/ArrayList structure mirroring the v4 YAML layout.
     */
    public HashMap getV4Config() {
        /* run mode: BATCH control type: INTERNAL control messenger: null driver files: null */
        HashMap cfgCanary = new HashMap();
        cfgCanary.put("run mode", "BATCH");
        cfgCanary.put("control type", "INTERNAL");
        cfgCanary.put("control messenger", null);
        cfgCanary.put("driver files", null);
        /* timing options: dynamic start-stop: off date-time format: mm/dd/yyyy HH:MM:SS date-time start:
           02/21/2006 00:00:00 date-time stop: 04/30/2006 23:40:00 data interval: 00:20:00
           message interval: 00:00:01 */
        HashMap cfgTiming = new HashMap();
        // Convert the Java SimpleDateFormat pattern to a MATLAB-style one by
        // swapping month/minute codes (m <-> M, via 'P' as a placeholder) and
        // uppercasing seconds. NOTE(review): breaks if the pattern contains a
        // literal 'P'.
        String matlabStepFormat = stepFormat;
        matlabStepFormat = matlabStepFormat.replaceAll("m", "P");
        matlabStepFormat = matlabStepFormat.replaceAll("M", "m");
        matlabStepFormat = matlabStepFormat.replaceAll("P", "M");
        matlabStepFormat = matlabStepFormat.replaceAll("s", "S");
        cfgTiming.put("dynamic start-stop", false);
        cfgTiming.put("date-time format", matlabStepFormat);
        SimpleDateFormat df = new SimpleDateFormat(stepFormat);
        cfgTiming.put("date-time start", df.format(new Date(this.stepMin)));
        cfgTiming.put("date-time stop", df.format(new Date(this.stepMax)));
        // Render the step interval as an HH:mm:ss wall-clock string (UTC so
        // epoch millis map directly). NOTE(review): only valid for intervals
        // shorter than 24 hours.
        df = new SimpleDateFormat("HH:mm:ss");
        df.setTimeZone(new SimpleTimeZone(0, "UTC"));
        cfgTiming.put("data interval", df.format(new Date(this.stepDel)));
        cfgTiming.put("message interval", "00:00:01");
        /* data sources: - id: csvfile type : csv location : ../sample_data/test_station_d.csv
           enabled : yes timestep options: field: "TIME_STEP" dataFormat: "mm/dd/yyyy HH:MM" */
        ArrayList cfgDataSources = new ArrayList();
        HashMap hmDS = new HashMap();
        hmDS.put("id", "csvfile");
        hmDS.put("type", "csv");
        hmDS.put("location", dataFile.getName());
        hmDS.put("enabled", true);
        HashMap dsTO = new HashMap();
        hmDS.put("timestep options", dsTO);
        dsTO.put("field", stepFieldName);
        dsTO.put("format", matlabStepFormat);
        cfgDataSources.add(hmDS);
        /*
         * signals:
         * - id: TEST_CL
         * * SCADA tag: D_CL2_VAL
         * * evaluation type: wq
         * * parameter type: CL2
         * * ignore changes: none
         * * data options:
         * * * precision: 0.0035
         * * * units: 'Mg/L'
         * * * valid range: [0.01, 5]
         * * * set points: [-.inf, 3]
         */
        ArrayList cfgSignals = new ArrayList();
        // One signal entry per analyzed numeric channel; ranges/set-points
        // default to +/- infinity rather than being derived from the data.
        for (SimpleChannelDefn defn : channelDefs) {
            HashMap sigHM = new HashMap();
            sigHM.put("id", defn.name);
            sigHM.put("SCADA tag", defn.name);
            sigHM.put("evaluation type", "wq");
            sigHM.put("parameter type", "UNKN");
            sigHM.put("ignore changes", "none");
            HashMap datOpt = new HashMap();
            String description = new String();
            description = "Type=" + defn.valType.toString() + ", ";
            if (defn.valType == FieldFormat.DOUBLE) {
                description += String.format("min=%.3f, max=%.3f, delta=%.3g",
                        defn.minValue, defn.maxValue, defn.delValue);
            } else if (defn.valType == FieldFormat.INTEGER) {
                // NOTE(review): new Double(...) is deprecated; (int) cast would do.
                description += String.format("min=%d, max=%d, delta=%d",
                        new Double(defn.minValue).intValue(),
                        new Double(defn.maxValue).intValue(),
                        new Double(defn.delValue).intValue());
            }
            sigHM.put("description", description);
            sigHM.put("data options", datOpt);
            // Floor the precision so a zero/tiny delta doesn't propagate.
            defn.delValue = max(defn.delValue, 0.01);
            datOpt.put("precision", defn.delValue);
            ArrayList<Double> valRng = new ArrayList();
            valRng.add(Double.NEGATIVE_INFINITY);
            valRng.add(Double.POSITIVE_INFINITY);
            ArrayList<Double> setPts = new ArrayList();
            setPts.add(Double.NEGATIVE_INFINITY);
            setPts.add(Double.POSITIVE_INFINITY);
            datOpt.put("valid range", valRng);
            datOpt.put("set-points", setPts);
            cfgSignals.add(sigHM);
        }
        /*algorithms: - id: RESIDUAL_TEST type: LPCF history window: 60 outlier threshold: .inf
          event threshold: .inf event timeout: 5 event window save: 30 */
        ArrayList cfgAlgorithms = new ArrayList();
        HashMap lpcfEntry = new HashMap();
        HashMap mvnnEntry = new HashMap();
        HashMap BED;
        // Window sizes expressed in number of timesteps: 1 day of history,
        // 1 hour of timeout/BED window.
        int histWindow = (int) (86400000 / this.stepDel);
        int eventTimeOut = (int) (3600000 / this.stepDel);
        int bedWindow = (int) (3600000 / this.stepDel);
        int eventWinSave = eventTimeOut + bedWindow + bedWindow;
        lpcfEntry.put("id", "test_lpcf");
        lpcfEntry.put("type", "LPCF");
        lpcfEntry.put("history window", histWindow);
        lpcfEntry.put("outlier threshold", 1.2);
        lpcfEntry.put("event threshold", 0.94);
        lpcfEntry.put("event timeout", eventTimeOut);
        lpcfEntry.put("event window save", eventWinSave);
        BED = new HashMap();
        BED.put("outlier probability", 0.5);
        BED.put("window", bedWindow);
        lpcfEntry.put("BED", BED);
        mvnnEntry.put("id", "test_mvnn");
        mvnnEntry.put("type", "MVNN");
        mvnnEntry.put("history window", histWindow);
        mvnnEntry.put("outlier threshold", 1.2);
        mvnnEntry.put("event threshold", 0.94);
        mvnnEntry.put("event timeout", eventTimeOut);
        mvnnEntry.put("event window save", eventWinSave);
        BED = new HashMap();
        BED.put("outlier probability", 0.5);
        BED.put("window", bedWindow);
        mvnnEntry.put("BED", BED);
        cfgAlgorithms.add(lpcfEntry);
        cfgAlgorithms.add(mvnnEntry);
        /*monitoring stations: - id: StationD station id number: 1 station tag name: StationD
          location id number: 4 enabled: yes inputs: - id: csvfile outputs: - id: outputfiles
          signals: - id: CAL_StationD - id: TEST_CL - id: TEST_PH - id: TEST_TEMP - id: TEST_COND
          - id: TEST_TURB - id: TEST_TOC - id: TEST_PUMPS - id: RATIO_PH_CL2
          algorithms: - id: RESIDUAL_TEST */
        ArrayList cfgMonStations = new ArrayList();
        HashMap station = new HashMap();
        cfgMonStations.add(station);
        // Placeholder station identity — user is expected to rename.
        station.put("id", "NEW_STATION");
        station.put("station id number", 1);
        station.put("station tag name", "NEW_STATION_TAG");
        station.put("location id number", 1);
        station.put("enabled", true);
        ArrayList stnInputs = new ArrayList();
        station.put("inputs", stnInputs);
        HashMap inpId = new HashMap();
        inpId.put("id", "csvfile");
        stnInputs.add(inpId);
        ArrayList stnOutputs = new ArrayList();
        station.put("outputs", stnOutputs);
        ArrayList stnSignals = new ArrayList();
        for (SimpleChannelDefn defn : channelDefs) {
            HashMap sigHM = new HashMap();
            sigHM.put("id", defn.name);
            stnSignals.add(sigHM);
        }
        station.put("signals", stnSignals);
        ArrayList algs = new ArrayList();
        HashMap algLPCF = new HashMap();
        algLPCF.put("id", "test_lpcf");
        algs.add(algLPCF);
        HashMap algMVNN = new HashMap();
        algMVNN.put("id", "test_mvnn");
        algs.add(algMVNN);
        station.put("algorithms", algs);
        // Assemble the top-level configuration document.
        HashMap config = new HashMap();
        config.put("canary", cfgCanary);
        config.put("timing options", cfgTiming);
        config.put("data sources", cfgDataSources);
        config.put("signals", cfgSignals);
        config.put("algorithms", cfgAlgorithms);
        config.put("monitoring stations", cfgMonStations);
        return config;
    }
}
/* * [The "BSD license"] * Copyright (c) 2012 Terence Parr * Copyright (c) 2012 Sam Harwell * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.antlr.v4.codegen; import org.antlr.runtime.ANTLRStringStream; import org.antlr.runtime.Token; import org.antlr.v4.codegen.model.RuleFunction; import org.antlr.v4.codegen.model.chunk.ActionChunk; import org.antlr.v4.codegen.model.chunk.ActionText; import org.antlr.v4.codegen.model.chunk.ArgRef; import org.antlr.v4.codegen.model.chunk.LabelRef; import org.antlr.v4.codegen.model.chunk.ListLabelRef; import org.antlr.v4.codegen.model.chunk.LocalRef; import org.antlr.v4.codegen.model.chunk.NonLocalAttrRef; import org.antlr.v4.codegen.model.chunk.QRetValueRef; import org.antlr.v4.codegen.model.chunk.RetValueRef; import org.antlr.v4.codegen.model.chunk.RulePropertyRef; import org.antlr.v4.codegen.model.chunk.RulePropertyRef_ctx; import org.antlr.v4.codegen.model.chunk.RulePropertyRef_start; import org.antlr.v4.codegen.model.chunk.RulePropertyRef_stop; import org.antlr.v4.codegen.model.chunk.RulePropertyRef_text; import org.antlr.v4.codegen.model.chunk.SetAttr; import org.antlr.v4.codegen.model.chunk.SetNonLocalAttr; import org.antlr.v4.codegen.model.chunk.ThisRulePropertyRef_ctx; import org.antlr.v4.codegen.model.chunk.ThisRulePropertyRef_start; import org.antlr.v4.codegen.model.chunk.ThisRulePropertyRef_stop; import org.antlr.v4.codegen.model.chunk.ThisRulePropertyRef_text; import org.antlr.v4.codegen.model.chunk.TokenPropertyRef; import org.antlr.v4.codegen.model.chunk.TokenPropertyRef_channel; import org.antlr.v4.codegen.model.chunk.TokenPropertyRef_index; import org.antlr.v4.codegen.model.chunk.TokenPropertyRef_int; import org.antlr.v4.codegen.model.chunk.TokenPropertyRef_line; import org.antlr.v4.codegen.model.chunk.TokenPropertyRef_pos; import org.antlr.v4.codegen.model.chunk.TokenPropertyRef_text; import org.antlr.v4.codegen.model.chunk.TokenPropertyRef_type; import org.antlr.v4.codegen.model.chunk.TokenRef; import org.antlr.v4.codegen.model.decl.StructDecl; import org.antlr.v4.parse.ActionSplitter; import org.antlr.v4.parse.ActionSplitterListener; 
import org.antlr.v4.tool.Attribute;
import org.antlr.v4.tool.ErrorType;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.tool.ast.ActionAST;

import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Translates the raw text of a grammar action ({@code {...}}) into a list of
 * {@link ActionChunk} model objects for code generation.
 *
 * <p>It is driven as an {@link ActionSplitterListener}: {@link ActionSplitter}
 * scans the action text and calls back into this class for each attribute
 * reference ({@code $x}, {@code $x.y}, {@code $x::y}, {@code $x = ...}) and for
 * plain text runs; each callback appends one or more chunks to {@link #chunks}.
 */
public class ActionTranslator implements ActionSplitterListener {
    /**
     * Maps a predefined property name referenced on the CURRENT rule
     * (e.g. {@code $start}, {@code $text}) to the chunk class that models it.
     */
    public static final Map<String, Class<? extends RulePropertyRef>> thisRulePropToModelMap =
        new HashMap<String, Class<? extends RulePropertyRef>>();
    static {
        thisRulePropToModelMap.put("start", ThisRulePropertyRef_start.class);
        thisRulePropToModelMap.put("stop", ThisRulePropertyRef_stop.class);
        thisRulePropToModelMap.put("text", ThisRulePropertyRef_text.class);
        thisRulePropToModelMap.put("ctx", ThisRulePropertyRef_ctx.class);
    }

    /**
     * Maps a predefined property name referenced on ANOTHER rule's label
     * (e.g. {@code $r.start}) to the chunk class that models it.
     */
    public static final Map<String, Class<? extends RulePropertyRef>> rulePropToModelMap =
        new HashMap<String, Class<? extends RulePropertyRef>>();
    static {
        rulePropToModelMap.put("start", RulePropertyRef_start.class);
        rulePropToModelMap.put("stop", RulePropertyRef_stop.class);
        rulePropToModelMap.put("text", RulePropertyRef_text.class);
        rulePropToModelMap.put("ctx", RulePropertyRef_ctx.class);
    }

    /**
     * Maps a token property name (e.g. {@code $tok.line}) to the chunk class
     * that models it.
     */
    public static final Map<String, Class<? extends TokenPropertyRef>> tokenPropToModelMap =
        new HashMap<String, Class<? extends TokenPropertyRef>>();
    static {
        tokenPropToModelMap.put("text", TokenPropertyRef_text.class);
        tokenPropToModelMap.put("type", TokenPropertyRef_type.class);
        tokenPropToModelMap.put("line", TokenPropertyRef_line.class);
        tokenPropToModelMap.put("index", TokenPropertyRef_index.class);
        tokenPropToModelMap.put("pos", TokenPropertyRef_pos.class);
        tokenPropToModelMap.put("channel", TokenPropertyRef_channel.class);
        tokenPropToModelMap.put("int", TokenPropertyRef_int.class);
    }

    // Generator obtained from the factory; used for logging and error reporting.
    CodeGenerator gen;
    // The action node whose text is being translated; its resolver answers
    // "what does $name mean here?" queries.
    ActionAST node;
    // Rule function the action belongs to; may be null for grammar-level actions.
    RuleFunction rf;
    // Output accumulated by the listener callbacks, in source order.
    List<ActionChunk> chunks = new ArrayList<ActionChunk>();
    OutputModelFactory factory;
    // Context struct the generated chunks resolve against (rule ctx or alt-label ctx).
    StructDecl nodeContext;

    public ActionTranslator(OutputModelFactory factory, ActionAST node) {
        this.factory = factory;
        this.node = node;
        this.gen = factory.getGenerator();
    }

    /** Concatenates the string form of every chunk; used to render a translated action. */
    public static String toString(List<ActionChunk> chunks) {
        StringBuilder buf = new StringBuilder();
        for (ActionChunk c : chunks) buf.append(c.toString());
        return buf.toString();
    }

    /**
     * Translates a full action token. If the token text is wrapped in curly
     * braces, they are stripped before translation.
     *
     * @param factory           model factory providing grammar/generator access
     * @param rf                enclosing rule function (may be null)
     * @param tokenWithinAction the action token whose text is translated
     * @param node              the action AST node (provides the resolver)
     * @return the translated chunk list
     */
    public static List<ActionChunk> translateAction(OutputModelFactory factory,
                                                    RuleFunction rf,
                                                    Token tokenWithinAction,
                                                    ActionAST node) {
        String action = tokenWithinAction.getText();
        if ( action.charAt(0)=='{' ) {
            int firstCurly = action.indexOf('{');
            int lastCurly = action.lastIndexOf('}');
            if ( firstCurly>=0 && lastCurly>=0 ) {
                action = action.substring(firstCurly+1, lastCurly); // trim {...}
            }
        }
        return translateActionChunk(factory, rf, action, node);
    }

    /**
     * Core translation: runs {@link ActionSplitter} over {@code action} with a
     * fresh translator as listener and returns the accumulated chunks.
     *
     * <p>The node context defaults to the rule's ctx struct; if the alternative
     * carries a label (# label), the alt-label ctx struct is used instead.
     * Line/column of the original token are propagated into the char stream so
     * downstream error messages point at the right source position.
     */
    public static List<ActionChunk> translateActionChunk(OutputModelFactory factory,
                                                         RuleFunction rf,
                                                         String action,
                                                         ActionAST node) {
        Token tokenWithinAction = node.token;
        ActionTranslator translator = new ActionTranslator(factory, node);
        translator.rf = rf;
        factory.getGrammar().tool.log("action-translator", "translate " + action);
        String altLabel = node.getAltLabel();
        if ( rf!=null ) translator.nodeContext = rf.ruleCtx;
        if ( altLabel!=null ) translator.nodeContext = rf.altLabelCtxs.get(altLabel);
        ANTLRStringStream in = new ANTLRStringStream(action);
        in.setLine(tokenWithinAction.getLine());
        in.setCharPositionInLine(tokenWithinAction.getCharPositionInLine());
        ActionSplitter trigger = new ActionSplitter(in, translator);
        // forces eval, triggers listener methods
        trigger.getActionTokens();
        return translator.chunks;
    }

    /**
     * Handles a simple {@code $x} reference. Resolution order: declared
     * attribute (arg/retval/local/predefined) first, then implicit token ref,
     * explicit label, list label, and finally a rule reference.
     *
     * NOTE(review): when {@code x} resolves to a declared attribute, the method
     * does not return after the switch; it falls through to the token/label
     * checks below. Presumably the resolver answers are mutually exclusive so
     * no duplicate chunk is emitted — confirm against the resolver contract.
     */
    @Override
    public void attr(String expr, Token x) {
        gen.g.tool.log("action-translator", "attr "+x);
        Attribute a = node.resolver.resolveToAttribute(x.getText(), node);
        if ( a!=null ) {
            switch ( a.dict.type ) {
                case ARG: chunks.add(new ArgRef(nodeContext,x.getText())); break;
                case RET: chunks.add(new RetValueRef(rf.ruleCtx, x.getText())); break;
                case LOCAL: chunks.add(new LocalRef(nodeContext,x.getText())); break;
                case PREDEFINED_RULE: chunks.add(getRulePropertyRef(x)); break;
            }
        }
        if ( node.resolver.resolvesToToken(x.getText(), node) ) {
            chunks.add(new TokenRef(nodeContext,getTokenLabel(x.getText()))); // $label
            return;
        }
        if ( node.resolver.resolvesToLabel(x.getText(), node) ) {
            chunks.add(new LabelRef(nodeContext,getTokenLabel(x.getText()))); // $x for x=ID etc...
            return;
        }
        if ( node.resolver.resolvesToListLabel(x.getText(), node) ) {
            chunks.add(new ListLabelRef(nodeContext,x.getText())); // $ids for ids+=ID etc...
            return;
        }
        Rule r = factory.getGrammar().getRule(x.getText());
        if ( r!=null ) {
            chunks.add(new LabelRef(nodeContext,getRuleLabel(x.getText()))); // $r for r rule ref
        }
    }

    /**
     * Handles a qualified {@code $x.y} reference: either member access on a
     * predefined attribute (e.g. {@code $ctx.foo}), or {@code y} as an
     * attribute of rule/token {@code x}.
     *
     * NOTE(review): {@code a} is dereferenced ({@code a.dict.type}) without a
     * null check; an unresolvable {@code $x.y} would raise an NPE here rather
     * than a grammar error — confirm whether upstream validation guarantees
     * resolution before translation.
     */
    @Override
    public void qualifiedAttr(String expr, Token x, Token y) {
        gen.g.tool.log("action-translator", "qattr "+x+"."+y);
        if ( node.resolver.resolveToAttribute(x.getText(), node)!=null ) {
            // must be a member access to a predefined attribute like $ctx.foo
            attr(expr, x);
            chunks.add(new ActionText(nodeContext, "."+y.getText()));
            return;
        }
        Attribute a = node.resolver.resolveToAttribute(x.getText(), y.getText(), node);
        switch ( a.dict.type ) {
            case ARG: chunks.add(new ArgRef(nodeContext,y.getText())); break; // has to be current rule
            case RET:
                // $r.retval on the current rule uses the rule ctx directly;
                // on another rule it goes through that rule's label.
                if ( factory.getCurrentRuleFunction()!=null &&
                     factory.getCurrentRuleFunction().name.equals(x.getText()) ) {
                    chunks.add(new RetValueRef(rf.ruleCtx, y.getText())); break;
                }
                else {
                    chunks.add(new QRetValueRef(nodeContext, getRuleLabel(x.getText()), y.getText())); break;
                }
            case PREDEFINED_RULE:
                if ( factory.getCurrentRuleFunction()!=null &&
                     factory.getCurrentRuleFunction().name.equals(x.getText()) ) {
                    chunks.add(getRulePropertyRef(y));
                }
                else {
                    chunks.add(getRulePropertyRef(x, y));
                }
                break;
            case TOKEN:
                chunks.add(getTokenPropertyRef(x, y));
                break;
        }
    }

    /** Handles an assignment {@code $x = rhs}; the rhs is itself translated recursively. */
    @Override
    public void setAttr(String expr, Token x, Token rhs) {
        gen.g.tool.log("action-translator", "setAttr "+x+" "+rhs);
        List<ActionChunk> rhsChunks = translateActionChunk(factory,rf,rhs.getText(),node);
        SetAttr s = new SetAttr(nodeContext, x.getText(), rhsChunks);
        chunks.add(s);
    }

    /**
     * Handles a dynamically-scoped read {@code $x::y}.
     * NOTE(review): {@code r} is not null-checked before {@code r.index};
     * an unknown rule name {@code x} would NPE — confirm upstream validation.
     */
    @Override
    public void nonLocalAttr(String expr, Token x, Token y) {
        gen.g.tool.log("action-translator", "nonLocalAttr "+x+"::"+y);
        Rule r = factory.getGrammar().getRule(x.getText());
        chunks.add(new NonLocalAttrRef(nodeContext, x.getText(), y.getText(), r.index));
    }

    /** Handles a dynamically-scoped write {@code $x::y = rhs}; rhs is translated recursively. */
    @Override
    public void setNonLocalAttr(String expr, Token x, Token y, Token rhs) {
        gen.g.tool.log("action-translator", "setNonLocalAttr "+x+"::"+y+"="+rhs);
        Rule r = factory.getGrammar().getRule(x.getText());
        List<ActionChunk> rhsChunks = translateActionChunk(factory,rf,rhs.getText(),node);
        SetNonLocalAttr s = new SetNonLocalAttr(nodeContext, x.getText(), y.getText(), r.index, rhsChunks);
        chunks.add(s);
    }

    /** Handles a plain text run between attribute references; passed through verbatim. */
    @Override
    public void text(String text) {
        chunks.add(new ActionText(nodeContext,text));
    }

    /**
     * Reflectively instantiates the chunk class mapped to token property
     * {@code y} (see {@link #tokenPropToModelMap}), bound to the label of
     * token {@code x}. Returns null (after reporting an internal error) if
     * reflection fails — e.g. an unmapped property name yields a null class.
     */
    TokenPropertyRef getTokenPropertyRef(Token x, Token y) {
        try {
            Class<? extends TokenPropertyRef> c = tokenPropToModelMap.get(y.getText());
            Constructor<? extends TokenPropertyRef> ctor = c.getConstructor(StructDecl.class, String.class);
            TokenPropertyRef ref = ctor.newInstance(nodeContext, getTokenLabel(x.getText()));
            return ref;
        }
        catch (Exception e) {
            factory.getGrammar().tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, e);
        }
        return null;
    }

    // $text
    /**
     * Builds the chunk for a predefined property of the CURRENT rule
     * (e.g. {@code $text}); see {@link #thisRulePropToModelMap}.
     * Returns null after reporting an internal error on reflection failure.
     */
    RulePropertyRef getRulePropertyRef(Token prop) {
        try {
            Class<? extends RulePropertyRef> c = thisRulePropToModelMap.get(prop.getText());
            Constructor<? extends RulePropertyRef> ctor = c.getConstructor(StructDecl.class, String.class);
            RulePropertyRef ref = ctor.newInstance(nodeContext, getRuleLabel(prop.getText()));
            return ref;
        }
        catch (Exception e) {
            factory.getGrammar().tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, e);
        }
        return null;
    }

    /**
     * Builds the chunk for predefined property {@code prop} of ANOTHER rule
     * referenced as {@code $x.prop}; see {@link #rulePropToModelMap}.
     * Returns null after reporting an internal error on reflection failure.
     */
    RulePropertyRef getRulePropertyRef(Token x, Token prop) {
        Grammar g = factory.getGrammar();
        try {
            Class<? extends RulePropertyRef> c = rulePropToModelMap.get(prop.getText());
            Constructor<? extends RulePropertyRef> ctor = c.getConstructor(StructDecl.class, String.class);
            RulePropertyRef ref = ctor.newInstance(nodeContext, getRuleLabel(x.getText()));
            return ref;
        }
        catch (Exception e) {
            g.tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, e, prop.getText());
        }
        return null;
    }

    /** Returns {@code x} itself if it is an explicit label, else the target-specific implicit token label. */
    public String getTokenLabel(String x) {
        if ( node.resolver.resolvesToLabel(x, node) ) return x;
        return factory.getGenerator().getTarget().getImplicitTokenLabel(x);
    }

    /** Returns {@code x} itself if it is an explicit label, else the target-specific implicit rule label. */
    public String getRuleLabel(String x) {
        if ( node.resolver.resolvesToLabel(x, node) ) return x;
        return factory.getGenerator().getTarget().getImplicitRuleLabel(x);
    }
}
/* * Copyright (c) 2010, Marco Brade * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package net.sf.prefixedproperties.spring;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

import net.sf.prefixedproperties.PrefixedProperties;
import net.sf.prefixedproperties.config.PrefixConfig;

import org.springframework.beans.factory.config.PropertyOverrideConfigurer;
import org.springframework.core.io.Resource;
import org.springframework.jmx.export.annotation.ManagedAttribute;
import org.springframework.jmx.export.annotation.ManagedResource;
import org.springframework.util.CollectionUtils;

/**
 * The PrefixedPropertyOverrideConfigurer behaves the same as
 * {@link PropertyOverrideConfigurer} with the difference that the loaded
 * properties are {@link PrefixedProperties}, resolved against a configurable
 * default prefix (environment). To configure the default prefix you can use
 * the following methods:
 * {@link #setEnvironmentFactory(EnvironmentFactory)}<br>
 * {@link #setDefaultPrefixSystemPropertyKey(String)}<br>
 * {@link #setDefaultPrefix(String)}<br>
 * The usage of the methods will be in the same order as above.
 */
@ManagedResource("prefixedproperties:name=PrefixedPropertyOverrideConfigurer")
public class PrefixedPropertyOverrideConfigurer extends PropertyOverrideConfigurer {

    // NOTE: the following fields shadow the equivalent state of the Spring
    // superclass so this class can re-implement loadProperties/mergeProperties
    // with prefix support; the matching setters below override the superclass
    // setters to populate these fields instead.

    /** The local override. */
    protected boolean localOverride;

    /** The local properties. */
    protected Properties[] localProperties;

    /** The locations. */
    protected Resource[] locations;

    /** The ignore resource not found. */
    protected boolean ignoreResourceNotFound;

    /** The file encoding. */
    protected String fileEncoding;

    /** The prefix config list. */
    protected List<PrefixConfig> prefixConfigList;

    /** The default prefix. */
    protected String defaultPrefix;

    // Lazily created singleton; guarded by the synchronized createProperties().
    private PrefixedProperties myProperties;

    // Handles the actual parsing of properties/XML/JSON/YAML resources.
    private final PrefixedPropertiesPersister persister = new PrefixedPropertiesPersister();

    private String defaultPrefixSystemPropertyKey;

    protected EnvironmentFactory environmentFactory = null;

    private boolean mixDefaultAndLocalPrefixConfigurations = false;

    /**
     * Creates the properties.
     *
     * <p>Lazily builds the {@link PrefixedProperties} instance exactly once.
     * The default prefix (environment) is chosen in priority order:
     * {@link EnvironmentFactory} result, then the system property named by
     * {@code defaultPrefixSystemPropertyKey}, then {@code defaultPrefix}.
     *
     * @return the prefixed properties
     */
    protected synchronized PrefixedProperties createProperties() {
        if (myProperties == null) {
            PrefixedProperties resultProperties = null;
            String environment = defaultPrefix;
            if (environmentFactory != null) {
                environment = environmentFactory.getEnvironment();
            } else if (defaultPrefixSystemPropertyKey != null) {
                environment = System.getProperty(defaultPrefixSystemPropertyKey);
                if (environment == null) {
                    if (logger.isWarnEnabled()) {
                        logger.warn(String.format("Didn't found system property key to set default prefix: %1s", defaultPrefixSystemPropertyKey));
                    }
                }
            }
            // An explicit prefix-config list wins over a plain environment string.
            if (prefixConfigList != null) {
                resultProperties = PrefixedProperties.createCascadingPrefixProperties(prefixConfigList);
            } else {
                if (environment != null) {
                    resultProperties = PrefixedProperties.createCascadingPrefixProperties(environment);
                } else {
                    resultProperties = new PrefixedProperties();
                }
            }
            resultProperties.setDefaultPrefix(environment);
            if (logger.isInfoEnabled()) {
                logger.info(String.format("Setting default prefix to: %1s", environment));
            }
            resultProperties.setMixDefaultAndLocalPrefixSettings(mixDefaultAndLocalPrefixConfigurations);
            myProperties = resultProperties;
        }
        return myProperties;
    }

    /**
     * Gets the effective properties.
     *
     * <p>Exposed via JMX: renders every key as {@code key=value}.
     * NOTE(review): reads {@code myProperties} directly; if invoked before
     * {@link #createProperties()} has run this throws a NullPointerException —
     * confirm the intended JMX access ordering.
     *
     * @return the effective properties
     */
    @ManagedAttribute()
    public List<String> getEffectiveProperties() {
        final List<String> properties = new LinkedList<String>();
        for (final String key : myProperties.stringPropertyNames()) {
            properties.add(key + "=" + myProperties.get(key));
        }
        return properties;
    }

    /**
     * Gets the prefixed properties.
     *
     * @return the prefixed properties (null until {@link #createProperties()} has run)
     */
    public PrefixedProperties getPrefixedProperties() {
        return myProperties;
    }

    /** @return whether default and local prefix configurations are mixed */
    public boolean isMixDefaultAndLocalPrefixConfigurations() {
        return mixDefaultAndLocalPrefixConfigurations;
    }

    /**
     * Load properties.
     *
     * <p>For each configured location, tries to open the resource as a File
     * first (so file-based resources get buffered streams) and falls back to
     * {@link Resource#getInputStream()}; then dispatches on the filename
     * extension to the XML/JSON/YAML/plain loader of the persister, honoring
     * {@code fileEncoding} where the format supports a Reader.
     *
     * NOTE(review): if {@code location.getFile()} succeeds but opening the
     * FileInputStream throws, {@code file != null} yet {@code is == null},
     * so the subsequent load call would NPE — verify against the Resource
     * implementations in use.
     *
     * @param props
     *            the props
     * @throws IOException
     *             Signals that an I/O exception has occurred (unless
     *             {@code ignoreResourceNotFound} is set, in which case the
     *             failure is only logged).
     */
    @Override
    protected void loadProperties(final Properties props) throws IOException {
        if (locations != null) {
            for (int i = 0; i < locations.length; i++) {
                final Resource location = locations[i];
                if (logger.isInfoEnabled()) {
                    logger.info("Loading properties file from " + location);
                }
                File file = null;
                InputStream is = null;
                try {
                    try {
                        file = location.getFile();
                        is = new BufferedInputStream(new FileInputStream(file));
                    } catch (final IOException ie) {// ignore
                    } finally {
                        if (file == null) {
                            is = location.getInputStream();
                        }
                    }
                    if (location.getFilename().toLowerCase().endsWith(Constants.XML_FILE_EXTENSION)) {
                        persister.loadFromXml(props, is);
                    } else if (location.getFilename().toLowerCase().endsWith(Constants.JSON_FILE_EXTENSION)) {
                        if (fileEncoding != null) {
                            persister.loadFromJson(props, new InputStreamReader(is, Charset.forName(fileEncoding)));
                        } else {
                            persister.loadFromJson(props, is);
                        }
                    } else if (location.getFilename().toLowerCase().endsWith(Constants.YAML_FILE_EXTENSION)) {
                        if (fileEncoding != null) {
                            persister.loadFromYAML(props, new InputStreamReader(is, Charset.forName(fileEncoding)));
                        } else {
                            persister.loadFromYAML(props, is);
                        }
                    } else {
                        if (fileEncoding != null) {
                            persister.load(props, new InputStreamReader(is, Charset.forName(fileEncoding)));
                        } else {
                            persister.load(props, is);
                        }
                    }
                } catch (final IOException ex) {
                    if (ignoreResourceNotFound) {
                        if (logger.isWarnEnabled()) {
                            logger.warn("Could not load properties from " + location + ": " + ex.getMessage());
                        }
                    } else {
                        throw ex;
                    }
                } finally {
                    if (is != null) {
                        is.close();
                    }
                }
            }
        }
    }

    /*
     * (non-Javadoc)
     *
     * @see org.springframework.core.io.support.PropertiesLoaderSupport#
     * mergeProperties()
     */
    // Merges local properties and loaded resources into the single
    // PrefixedProperties instance; localOverride decides whether resources
    // are loaded before (and thus overridden by) or after the local sets.
    @Override
    protected Properties mergeProperties() throws IOException {
        final PrefixedProperties myProperties = createProperties();
        if (localOverride) {
            // Load from file upfront, to let local properties override.
            loadProperties(myProperties);
        }
        if (localProperties != null) {
            for (int i = 0; i < localProperties.length; i++) {
                CollectionUtils.mergePropertiesIntoMap(localProperties[i], myProperties);
            }
        }
        if (!localOverride) {
            // Load from file afterwards, to let those properties override.
            loadProperties(myProperties);
        }
        return myProperties;
    }

    /**
     * Sets the default prefix.
     *
     * @param defaultPrefix
     *            the new default prefix
     */
    public void setDefaultPrefix(final String defaultPrefix) {
        this.defaultPrefix = defaultPrefix;
    }

    /**
     * Sets this method to specify a system property to be used as an
     * environment. The value of the property will be used for setting the
     * default prefix. {@link PrefixedProperties#setDefaultPrefix(String)}
     *
     * @param defaultPrefixSystemPropertyKey
     *            the new default prefix system property key
     */
    public void setDefaultPrefixSystemPropertyKey(final String defaultPrefixSystemPropertyKey) {
        this.defaultPrefixSystemPropertyKey = defaultPrefixSystemPropertyKey;
    }

    /**
     * Sets the environment factory.
     *
     * @param environmentFactory
     *            the new environment factory
     */
    public void setEnvironmentFactory(final EnvironmentFactory environmentFactory) {
        this.environmentFactory = environmentFactory;
    }

    /**
     * Set the encoding to use for parsing properties files.
     * <p>
     * Default is none, using the <code>java.util.Properties</code> default
     * encoding.
     * <p>
     * Only applies to classic properties files, not to XML files.
     *
     * @param encoding
     *            the new file encoding
     * @see org.springframework.util.PropertiesPersister#load
     */
    @Override
    public void setFileEncoding(final String encoding) {
        fileEncoding = encoding;
    }

    /**
     * Set if failure to find the property resource should be ignored.
     * <p>
     * "true" is appropriate if the properties file is completely optional.
     * Default is "false".
     *
     * @param ignoreResourceNotFound
     *            the new ignore resource not found
     */
    @Override
    public void setIgnoreResourceNotFound(final boolean ignoreResourceNotFound) {
        this.ignoreResourceNotFound = ignoreResourceNotFound;
    }

    /*
     * (non-Javadoc)
     *
     * @see org.springframework.core.io.support.PropertiesLoaderSupport#
     * setLocalOverride(boolean)
     */
    @Override
    public void setLocalOverride(final boolean localOverride) {
        this.localOverride = localOverride;
    }

    /**
     * Set a location of a properties file to be loaded.
     * <p>
     * Can point to a classic properties file or to an XML file that follows JDK
     * 1.5's properties XML format.
     *
     * @param location
     *            the new location
     */
    @Override
    public void setLocation(final Resource location) {
        locations = new Resource[] { location };
    }

    /** @param mixDefaultAndLocalPrefixConfigurations whether to mix default and local prefix settings */
    public void setMixDefaultAndLocalPrefixConfigurations(final boolean mixDefaultAndLocalPrefixConfigurations) {
        this.mixDefaultAndLocalPrefixConfigurations = mixDefaultAndLocalPrefixConfigurations;
    }

    /**
     * Sets the prefix configs.
     *
     * @param configs
     *            the new prefix configs
     */
    public void setPrefixConfigs(final List<PrefixConfig> configs) {
        prefixConfigList = configs;
    }

    /**
     * Set local properties, e.g. via the "props" tag in XML bean definitions.
     * These can be considered defaults, to be overridden by properties loaded
     * from files.
     *
     * @param properties
     *            the new properties
     */
    @Override
    public void setProperties(final Properties properties) {
        localProperties = new Properties[] { properties };
    }

    /**
     * Set local properties, e.g. via the "props" tag in XML bean definitions,
     * allowing for merging multiple properties sets into one.
     *
     * @param propertiesArray
     *            the new properties array
     */
    @Override
    public void setPropertiesArray(final Properties... propertiesArray) {
        localProperties = propertiesArray;
    }
}
package org.apache.maven.plugin.doap; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.io.FileReader; import java.io.IOException; import org.apache.maven.plugin.doap.options.DoapArtifact; import org.apache.maven.plugin.doap.options.DoapOptions; import org.apache.maven.plugin.testing.AbstractMojoTestCase; import org.apache.maven.project.MavenProject; import org.codehaus.plexus.util.IOUtil; import org.codehaus.plexus.util.StringUtils; /** * Test {@link DoapMojo} class. * * @author <a href="mailto:vincent.siveton@gmail.com">Vincent Siveton</a> * @version $Id$ */ public class DoapMojoTest extends AbstractMojoTestCase { @Override protected void setUp() throws Exception { super.setUp(); } @Override protected void tearDown() throws Exception { super.tearDown(); } /** * Verify the generation of a pure DOAP file. 
* * @throws Exception if any */ public void testGeneratedDoap() throws Exception { File pluginXmlFile = new File( getBasedir(), "src/test/resources/unit/doap-configuration/doap-configuration-plugin-config.xml" ); DoapMojo mojo = (DoapMojo) lookupMojo( "generate", pluginXmlFile ); assertNotNull( "Mojo found.", mojo ); MavenProject mavenProject = (MavenProject) getVariableValueFromObject( mojo, "project" ); assertNotNull( mavenProject ); // Set some Mojo parameters setVariableValueToObject( mojo, "remoteRepositories", mavenProject.getRemoteArtifactRepositories() ); setVariableValueToObject( mojo, "about", mavenProject.getUrl() ); mojo.execute(); File doapFile = new File( getBasedir(), "target/test/unit/doap-configuration/doap-configuration.rdf" ); assertTrue( "Doap File was not generated!", doapFile.exists() ); String readed = readFile( doapFile ); // Validate // Pure DOAP assertTrue( readed.contains( "<rdf:RDF xml:lang=\"en\" xmlns=\"http://usefulinc.com/ns/doap#\" " + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" " + "xmlns:foaf=\"http://xmlns.com/foaf/0.1/\">" ) ); assertTrue( readed.contains( "<Project rdf:about=\"" + mavenProject.getUrl() + "\">" ) ); assertTrue( readed.contains( "<description xml:lang=\"en\">Test the DOAP plugin</description>" ) ); assertTrue( readed.contains( "<shortdesc xml:lang=\"en\">Test the DOAP plugin</shortdesc>" ) ); assertTrue( readed.contains( "<homepage rdf:resource=\"" + mavenProject.getUrl() + "\"/>" ) ); assertTrue( readed.contains( "<category>library</category>" ) ); assertTrue( readed.contains( "<created>2008-01-01</created>" ) ); assertTrue( readed.contains( "<name>" + mavenProject.getName() + "</name>" ) ); assertTrue( readed.contains( "<download-page rdf:resource=\"http://foo.org/download.html\"/>" ) ); assertTrue( readed.contains( "<programming-language>Java</programming-language>" ) ); assertTrue( readed.contains( "<bug-database rdf:resource=\"http://jira.codehaus.org/browse/MDOAPTEST\"/>" ) ); assertTrue( 
readed.contains( "<license rdf:resource=\"http://www.apache.org/licenses/LICENSE-2.0.txt\"/>" ) ); assertTrue( readed.contains( "<SVNRepository>" ) ); assertTrue( readed.contains( "<location rdf:resource=\"http://svn.foo.org/repos/asf/maven/plugins/trunk/maven-doap-plugin/src/test/resources/unit/doap-configuration\"/>" ) ); assertTrue( readed.contains( "<browse rdf:resource=\"http://svn.foo.org/viewvc/maven/plugins/trunk/maven-doap-plugin/src/test/resources/unit/doap-configuration\"/>" ) ); assertTrue( readed.contains( "<location rdf:resource=\"https://svn.foo.org/repos/asf/maven/plugins/trunk/maven-doap-plugin/src/test/resources/unit/doap-configuration\"/>" ) ); // conf assertTrue( readed.contains( "<audience>developers</audience>" ) ); assertTrue( readed.contains( "<blog rdf:resource=\"http://myblog.foo.org\"/>" ) ); assertTrue( readed.contains( "<implements>JSR-foo</implements>" ) ); assertTrue( readed.contains( "<language>en</language>" ) ); assertTrue( readed.contains( "<language>fr</language>" ) ); assertTrue( readed.contains( "<old-homepage rdf:resource=\"http://old.foo.org\"/>" ) ); assertTrue( readed.contains( "<os>windows</os>" ) ); assertTrue( readed.contains( "<os>linux</os>" ) ); assertTrue( readed.contains( "<os>mac</os>" ) ); assertTrue( readed.contains( "<platform>java</platform>" ) ); assertTrue( readed.contains( "<platform>firefox</platform>" ) ); assertTrue( readed.contains( "<screenshots rdf:resource=\"" + mavenProject.getUrl() +"/screenshots.html\"/>" ) ); assertTrue( readed.contains( "<service-endpoint rdf:resource=\"http://webservice.foo.org\"/>" ) ); assertTrue( readed.contains( "<wiki rdf:resource=\"http://wiki.foo.org\"/>" ) ); // ASF ext assertFalse( readed.contains( "<asfext:pmc rdf:resource=\"" + mavenProject.getUrl() + "\"/>" ) ); assertFalse( readed.contains( "<asfext:name>" + mavenProject.getName() + "</name>" ) ); // Developers and Organizations assertTrue( readed.contains( "<maintainer>" ) ); assertTrue( readed.contains( 
"<foaf:Person rdf:nodeID=\"b" ) ); assertTrue( readed.contains( "<foaf:name>Jane Doe</foaf:name>" ) ); assertTrue( readed.contains( "<foaf:Organization>" ) ); assertTrue( readed.contains( "<foaf:homepage rdf:resource=\"http://www.example.org\"/>" ) ); assertTrue( readed.contains( "<foaf:member rdf:nodeID=\"b" ) ); } /** * @throws Exception if any */ public void testLangParameter() throws Exception { File pluginXmlFile = new File( getBasedir(), "src/test/resources/unit/doap-configuration/doap-configuration-plugin-config.xml" ); DoapMojo mojo = (DoapMojo) lookupMojo( "generate", pluginXmlFile ); assertNotNull( "Mojo found.", mojo ); MavenProject mavenProject = (MavenProject) getVariableValueFromObject( mojo, "project" ); assertNotNull( mavenProject ); // check invalid lang setVariableValueToObject( mojo, "remoteRepositories", mavenProject.getRemoteArtifactRepositories() ); setVariableValueToObject( mojo, "lang", "foo" ); try { mojo.execute(); assertTrue( "No lang checked", false ); } catch ( Exception e ) { assertTrue( true ); } } /** * @throws Exception if any */ public void testAboutParameter() throws Exception { File pluginXmlFile = new File( getBasedir(), "src/test/resources/unit/doap-configuration/doap-configuration-plugin-config.xml" ); DoapMojo mojo = (DoapMojo) lookupMojo( "generate", pluginXmlFile ); assertNotNull( "Mojo found.", mojo ); MavenProject mavenProject = (MavenProject) getVariableValueFromObject( mojo, "project" ); assertNotNull( mavenProject ); // check invalid lang setVariableValueToObject( mojo, "remoteRepositories", mavenProject.getRemoteArtifactRepositories() ); setVariableValueToObject( mojo, "about", "foo" ); try { mojo.execute(); } catch ( Exception e ) { assertTrue( true ); } } /** * Verify the generation of a DOAP file from an artifact. 
* * @throws Exception if any */ public void testGeneratedDoapArtifact() throws Exception { File pluginXmlFile = new File( getBasedir(), "src/test/resources/unit/doap-configuration/doap-configuration-plugin-config.xml" ); DoapMojo mojo = (DoapMojo) lookupMojo( "generate", pluginXmlFile ); assertNotNull( "Mojo found.", mojo ); MavenProject mavenProject = (MavenProject) getVariableValueFromObject( mojo, "project" ); assertNotNull( mavenProject ); // Set some Mojo parameters setVariableValueToObject( mojo, "remoteRepositories", mavenProject.getRemoteArtifactRepositories() ); setVariableValueToObject( mojo, "about", mavenProject.getUrl() ); DoapOptions doapOptions = (DoapOptions)getVariableValueFromObject( mojo, "doapOptions" ); doapOptions.setDescription( "Common Utilities" ); doapOptions.setShortdesc( "Common Utilities" ); doapOptions.setDownloadPage( "http://plexus.codehaus.org/download-binaries.html" ); setVariableValueToObject( mojo, "doapOptions", doapOptions ); DoapArtifact artifact = new DoapArtifact(); artifact.setGroupId( "org.codehaus.plexus" ); artifact.setArtifactId( "plexus-utils" ); artifact.setVersion( "1.5.5" ); setVariableValueToObject( mojo, "artifact", artifact ); setVariableValueToObject( mojo, "outputDirectory", "target/test/unit/doap-configuration/" ); mojo.execute(); File doapFile = new File( getBasedir(), "target/test/unit/doap-configuration/doap_plexus-utils.rdf" ); assertTrue( "Doap File was not generated!", doapFile.exists() ); String readed = readFile( doapFile ); // Validate // Pure DOAP assertTrue( readed.contains( "<rdf:RDF xml:lang=\"en\" xmlns=\"http://usefulinc.com/ns/doap#\" " + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" " + "xmlns:foaf=\"http://xmlns.com/foaf/0.1/\">" ) ); assertTrue( readed.contains( "<Project rdf:about=\"http://plexus.codehaus.org/plexus-utils\">" ) ); assertTrue( readed.contains( "<name>Plexus Common Utilities</name>" ) ); assertTrue( readed.contains( "<description xml:lang=\"en\">Common 
Utilities</description>" ) ); assertTrue( readed.contains( "<shortdesc xml:lang=\"en\">Common Utilities</shortdesc>" ) ); assertTrue( readed.contains( "<created>2001-01-01</created>" ) ); assertTrue( readed.contains( "<download-page rdf:resource=\"http://plexus.codehaus.org/download-binaries.html\"/>" ) ); assertTrue( readed.contains( "<programming-language>Java</programming-language>" ) ); assertTrue( readed.contains( "<bug-database rdf:resource=\"http://jira.codehaus.org/browse/PLXUTILS\"/>" ) ); assertTrue( readed.contains( "<license rdf:resource=\"http://www.apache.org/licenses/LICENSE-2.0.txt\"/>" ) ); assertTrue( readed.contains( "<SVNRepository>" ) ); assertTrue( readed.contains( "<location rdf:resource=\"http://svn.codehaus.org/plexus/plexus-utils/tags/plexus-utils-1.5.5\"/>" ) ); assertTrue( readed.contains( "<browse rdf:resource=\"http://fisheye.codehaus.org/browse/plexus/plexus-utils/tags/plexus-utils-1.5.5\"/>" ) ); // conf assertTrue( readed.contains( "<audience>developers</audience>" ) ); assertTrue( readed.contains( "<blog rdf:resource=\"http://myblog.foo.org\"/>" ) ); assertTrue( readed.contains( "<implements>JSR-foo</implements>" ) ); assertTrue( readed.contains( "<language>en</language>" ) ); assertTrue( readed.contains( "<language>fr</language>" ) ); assertTrue( readed.contains( "<old-homepage rdf:resource=\"http://old.foo.org\"/>" ) ); assertTrue( readed.contains( "<os>windows</os>" ) ); assertTrue( readed.contains( "<os>linux</os>" ) ); assertTrue( readed.contains( "<os>mac</os>" ) ); assertTrue( readed.contains( "<platform>java</platform>" ) ); assertTrue( readed.contains( "<screenshots rdf:resource=\"http://plexus.codehaus.org/plexus-utils/screenshots.html\"/>" ) ); assertTrue( readed.contains( "<service-endpoint rdf:resource=\"http://webservice.foo.org\"/>" ) ); assertTrue( readed.contains( "<wiki rdf:resource=\"http://wiki.foo.org\"/>" ) ); } /** * Verify the generation of a DOAP file from a minimalist artifact. 
* * @throws Exception if any */ public void testGeneratedDoapArtifactMinimalist() throws Exception { File pluginXmlFile = new File( getBasedir(), "src/test/resources/unit/doap-configuration/doap-configuration-plugin-config.xml" ); DoapMojo mojo = (DoapMojo) lookupMojo( "generate", pluginXmlFile ); assertNotNull( "Mojo found.", mojo ); MavenProject mavenProject = (MavenProject) getVariableValueFromObject( mojo, "project" ); assertNotNull( mavenProject ); // Set some Mojo parameters setVariableValueToObject( mojo, "remoteRepositories", mavenProject.getRemoteArtifactRepositories() ); setVariableValueToObject( mojo, "about", "foo" ); DoapOptions doapOptions = new DoapOptions(); doapOptions.setName( "XStream" ); doapOptions.setDescription( "XStream is a simple library to serialize objects to XML and back again." ); doapOptions.setShortdesc( "XML Serializer" ); doapOptions.setHomepage( "http://xstream.codehaus.org/" ); doapOptions.setDownloadPage( "http://xstream.codehaus.org/download.html" ); doapOptions.setBugDatabase( "http://jira.codehaus.org/browse/XSTR" ); doapOptions.setLicense( "http://xstream.codehaus.org/license.html" ); doapOptions.setScmDeveloper( "http://svn.codehaus.org/xstream/trunk/xstream" ); doapOptions.setMailingList( "http://xstream.codehaus.org/list-user.html" ); doapOptions.setCreated( "2000-01-01"); setVariableValueToObject( mojo, "doapOptions", doapOptions ); DoapArtifact artifact = new DoapArtifact(); artifact.setGroupId( "xstream" ); artifact.setArtifactId( "xstream" ); artifact.setVersion( "1.1" ); setVariableValueToObject( mojo, "artifact", artifact ); setVariableValueToObject( mojo, "outputDirectory", "target/test/unit/doap-configuration/" ); mojo.execute(); File doapFile = new File( getBasedir(), "target/test/unit/doap-configuration/doap_xstream.rdf" ); assertTrue( "Doap File was not generated!", doapFile.exists() ); String readed = readFile( doapFile ); // Validate // Pure DOAP assertTrue( readed.contains( "<rdf:RDF xml:lang=\"en\" 
xmlns=\"http://usefulinc.com/ns/doap#\" " + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" " + "xmlns:foaf=\"http://xmlns.com/foaf/0.1/\">" ) ); assertTrue( readed.contains( "<Project>" ) ); assertTrue( readed.contains( "<name>XStream</name>" ) ); assertTrue( readed.contains( "<description xml:lang=\"en\">XStream is a simple library to serialize objects to XML and back again.</description>" ) ); assertTrue( readed.contains( "<shortdesc xml:lang=\"en\">XML Serializer</shortdesc>" ) ); assertTrue( readed.contains( "<created>2000-01-01</created>" ) ); assertTrue( readed.contains( "<download-page rdf:resource=\"http://xstream.codehaus.org/download.html\"/>" ) ); assertTrue( readed.contains( "<programming-language>Java</programming-language>" ) ); assertTrue( readed.contains( "<bug-database rdf:resource=\"http://jira.codehaus.org/browse/XSTR\"/>" ) ); assertTrue( readed.contains( "<license rdf:resource=\"http://xstream.codehaus.org/license.html\"/>" ) ); assertTrue( readed.contains( "<Repository>" ) ); assertTrue( readed.contains( "<location rdf:resource=\"http://svn.codehaus.org/xstream/trunk/xstream\"/>" ) ); assertTrue( readed.contains( "<mailing-list rdf:resource=\"http://xstream.codehaus.org/list-user.html\"/>" ) ); // conf assertFalse( readed.contains( "<audience>" ) ); assertFalse( readed.contains( "<blog rdf:resource=" ) ); assertFalse( readed.contains( "<implements>" ) ); assertFalse( readed.contains( "<language>" ) ); assertFalse( readed.contains( "<old-homepage rdf:resource=" ) ); assertFalse( readed.contains( "<os>" ) ); assertFalse( readed.contains( "<platform>" ) ); assertFalse( readed.contains( "<screenshots rdf:resource=" ) ); assertFalse( readed.contains( "<service-endpoint rdf:resource=" ) ); assertFalse( readed.contains( "<wiki rdf:resource=" ) ); } /** * Verify the generation of a DOAP file with ASF extension. 
* * @throws Exception if any */ public void testGeneratedDoapForASF() throws Exception { File pluginXmlFile = new File( getBasedir(), "src/test/resources/unit/asf-doap-configuration/asf-doap-configuration-plugin-config.xml" ); DoapMojo mojo = (DoapMojo) lookupMojo( "generate", pluginXmlFile ); assertNotNull( "Mojo found.", mojo ); MavenProject mavenProject = (MavenProject) getVariableValueFromObject( mojo, "project" ); assertNotNull( mavenProject ); // Set some Mojo parameters setVariableValueToObject( mojo, "remoteRepositories", mavenProject.getRemoteArtifactRepositories() ); setVariableValueToObject( mojo, "about", mavenProject.getUrl() ); mojo.execute(); File doapFile = new File( getBasedir(), "target/test/unit/asf-doap-configuration/asf-doap-configuration.rdf" ); assertTrue( "Doap File was not generated!", doapFile.exists() ); String readed = readFile( doapFile ); // Validate // ASF DOAP assertTrue( readed.contains( "<rdf:RDF xml:lang=\"en\" xmlns=\"http://usefulinc.com/ns/doap#\" " + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" " + "xmlns:foaf=\"http://xmlns.com/foaf/0.1/\" " + "xmlns:asfext=\"http://projects.apache.org/ns/asfext#\">" ) ); if ( StringUtils.isNotEmpty( mavenProject.getUrl() ) ) { assertTrue( readed.contains( "<Project rdf:about=\"" + mavenProject.getUrl() + "\">" ) ); assertTrue( readed.contains( "<homepage rdf:resource=\"" + mavenProject.getUrl() + "\"/>" ) ); } assertTrue( readed.contains( "<name>Apache " + mavenProject.getName() + "</name>" ) ); assertTrue( readed.contains( "<programming-language>Java</programming-language>" ) ); assertTrue( readed.contains( "<category rdf:resource=\"http://projects.apache.org/category/library\"/>" ) ); // ASF ext assertTrue( readed.contains( "<asfext:pmc rdf:resource=\"" + mavenProject.getUrl() + "\"/>" ) ); assertTrue( readed.contains( "<asfext:name>Apache " + mavenProject.getName() + "</asfext:name>" ) ); assertTrue( readed.contains( "<asfext:charter>" ) ); assertTrue( readed.contains( 
"<asfext:chair>" ) ); } /** * Verify the generation of a DOAP file with extra extension. * * @throws Exception if any */ public void testGeneratedExtraDoap() throws Exception { File pluginXmlFile = new File( getBasedir(), "src/test/resources/unit/doap-configuration/doap-extra-configuration-plugin-config.xml" ); DoapMojo mojo = (DoapMojo) lookupMojo( "generate", pluginXmlFile ); assertNotNull( "Mojo found.", mojo ); MavenProject mavenProject = (MavenProject) getVariableValueFromObject( mojo, "project" ); assertNotNull( mavenProject ); // Set some Mojo parameters setVariableValueToObject( mojo, "remoteRepositories", mavenProject.getRemoteArtifactRepositories() ); setVariableValueToObject( mojo, "about", mavenProject.getUrl() ); mojo.execute(); File doapFile = new File( getBasedir(), "target/test/unit/doap-configuration/doap-extra-configuration.rdf" ); assertTrue( "Doap File was not generated!", doapFile.exists() ); String readed = readFile( doapFile ); assertTrue( readed.contains( "<ciManagement rdf:resource=\"http://ci.foo.org\"/>" ) ); assertTrue( readed.contains( "<asfext:status>active</asfext:status>" ) ); assertTrue( readed.contains( "<labs:status>active</labs:status>" ) ); } /** * @param file * @return * @throws IOException if any */ private String readFile( File file ) throws IOException { String result = null; FileReader reader = null; try { // platform encoding reader = new FileReader( file ); result = IOUtil.toString( reader ); } finally { IOUtil.close( reader ); } return result; } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.find.impl; import com.intellij.codeInsight.highlighting.HighlightManager; import com.intellij.codeInsight.highlighting.HighlightManagerImpl; import com.intellij.codeInsight.hint.HintManager; import com.intellij.codeInsight.hint.HintManagerImpl; import com.intellij.codeInsight.hint.HintUtil; import com.intellij.find.*; import com.intellij.find.findUsages.FindUsagesManager; import com.intellij.find.impl.livePreview.SearchResults; import com.intellij.lang.Language; import com.intellij.lang.LanguageParserDefinitions; import com.intellij.lang.LanguageUtil; import com.intellij.lang.ParserDefinition; import com.intellij.lexer.LayeredLexer; import com.intellij.lexer.Lexer; import com.intellij.navigation.NavigationItem; import com.intellij.notification.NotificationDisplayType; import com.intellij.notification.NotificationGroup; import com.intellij.notification.NotificationType; import com.intellij.notification.impl.NotificationsConfigurationImpl; import com.intellij.openapi.actionSystem.ActionManager; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.IdeActions; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ReadAction; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.*; import com.intellij.openapi.editor.colors.TextAttributesKey; import com.intellij.openapi.editor.ex.FoldingModelEx; import com.intellij.openapi.editor.markup.RangeHighlighter; import com.intellij.openapi.fileEditor.FileEditor; import com.intellij.openapi.fileEditor.TextEditor; import com.intellij.openapi.fileTypes.FileType; import com.intellij.openapi.fileTypes.PlainSyntaxHighlighter; import com.intellij.openapi.fileTypes.SyntaxHighlighter; import com.intellij.openapi.fileTypes.SyntaxHighlighterFactory; import 
com.intellij.openapi.fileTypes.impl.AbstractFileType; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.patterns.StringPattern; import com.intellij.psi.*; import com.intellij.psi.search.SearchScope; import com.intellij.psi.tree.IElementType; import com.intellij.psi.tree.TokenSet; import com.intellij.ui.LightweightHint; import com.intellij.ui.ReplacePromptDialog; import com.intellij.usages.ChunkExtractor; import com.intellij.usages.impl.SyntaxHighlighterOverEditorHighlighter; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.containers.IntObjectMap; import com.intellij.util.containers.Predicate; import com.intellij.util.text.CharArrayUtil; import com.intellij.util.text.ImmutableCharSequence; import com.intellij.util.text.StringSearcher; import gnu.trove.THashSet; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.util.List; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; public final class FindManagerImpl extends FindManager { private static final Logger LOG = Logger.getInstance(FindManagerImpl.class); private final FindUsagesManager myFindUsagesManager; private boolean isFindWasPerformed; private boolean isSelectNextOccurrenceWasPerformed; private Point myReplaceInFilePromptPos = new Point(-1, -1); private Point myReplaceInProjectPromptPos = new Point(-1, -1); private final FindModel myFindInProjectModel = new FindModel(); private final FindModel myFindInFileModel = new FindModel(); private FindModel myFindNextModel; private FindModel myPreviousFindModel; private static final FindResultImpl 
NOT_FOUND_RESULT = new FindResultImpl(); private final Project myProject; private static final Key<Boolean> HIGHLIGHTER_WAS_NOT_FOUND_KEY = Key.create("com.intellij.find.impl.FindManagerImpl.HighlighterNotFoundKey"); private FindUIHelper myHelper; private static final NotificationGroup GROUP = new NotificationGroup("Find Problems", NotificationDisplayType.STICKY_BALLOON, false); public FindManagerImpl(@NotNull Project project) { myProject = project; FindSettings findSettings = FindSettings.getInstance(); findSettings.initModelBySetings(myFindInProjectModel); myFindInFileModel.setCaseSensitive(findSettings.isLocalCaseSensitive()); myFindInFileModel.setWholeWordsOnly(findSettings.isLocalWholeWordsOnly()); myFindInFileModel.setRegularExpressions(findSettings.isLocalRegularExpressions()); myFindUsagesManager = new FindUsagesManager(myProject); myFindInProjectModel.setMultipleFiles(true); NotificationsConfigurationImpl.remove("FindInPath"); Disposer.register(project, () -> { if (myHelper != null) { Disposer.dispose(myHelper); } }); } @Override public FindModel createReplaceInFileModel() { FindModel model = new FindModel(); model.copyFrom(getFindInFileModel()); model.setReplaceState(true); model.setPromptOnReplace(false); return model; } @Override public int showPromptDialog(@NotNull final FindModel model, String title) { return showPromptDialogImpl(model, title, null); } @PromptResultValue private int showPromptDialogImpl(@NotNull final FindModel model, String title, @Nullable final MalformedReplacementStringException exception) { ReplacePromptDialog replacePromptDialog = new ReplacePromptDialog(model.isMultipleFiles(), title, myProject, exception) { @Override @Nullable public Point getInitialLocation() { if (model.isMultipleFiles() && myReplaceInProjectPromptPos.x >= 0 && myReplaceInProjectPromptPos.y >= 0){ return myReplaceInProjectPromptPos; } if (!model.isMultipleFiles() && myReplaceInFilePromptPos.x >= 0 && myReplaceInFilePromptPos.y >= 0){ return 
myReplaceInFilePromptPos; } return null; } }; replacePromptDialog.show(); if (model.isMultipleFiles()){ myReplaceInProjectPromptPos = replacePromptDialog.getLocation(); } else{ myReplaceInFilePromptPos = replacePromptDialog.getLocation(); } return replacePromptDialog.getExitCode(); } void changeGlobalSettings(FindModel findModel) { String stringToFind = findModel.getStringToFind(); FindInProjectSettings findInProjectSettings = FindInProjectSettings.getInstance(myProject); if (!StringUtil.isEmpty(stringToFind)) { findInProjectSettings.addStringToFind(stringToFind); } if (!findModel.isMultipleFiles()) { setFindWasPerformed(); } if (findModel.isReplaceState()) { findInProjectSettings.addStringToReplace(findModel.getStringToReplace()); } if (findModel.isMultipleFiles() && !findModel.isProjectScope() && findModel.getDirectoryName() != null) { findInProjectSettings.addDirectory(findModel.getDirectoryName()); myFindInProjectModel.setWithSubdirectories(findModel.isWithSubdirectories()); } } @Override public void showFindDialog(@NotNull FindModel model, @NotNull Runnable okHandler) { if (myHelper == null || Disposer.isDisposed(myHelper)) { myHelper = new FindUIHelper(myProject, model, okHandler); Disposer.register(myHelper, () -> myHelper = null); } else { myHelper.setModel(model); myHelper.setOkHandler(okHandler); } myHelper.showUI(); } @Override @NotNull public FindModel getFindInFileModel() { return myFindInFileModel; } @Override @NotNull public FindModel getFindInProjectModel() { myFindInProjectModel.setFromCursor(false); myFindInProjectModel.setForward(true); myFindInProjectModel.setGlobal(true); myFindInProjectModel.setMultiline(Registry.is("ide.find.as.popup.allow.multiline")); myFindInProjectModel.setSearchInProjectFiles(false); return myFindInProjectModel; } @Override public boolean findWasPerformed() { return isFindWasPerformed; } @Override public void setFindWasPerformed() { isFindWasPerformed = true; isSelectNextOccurrenceWasPerformed = false; } @Override public 
boolean selectNextOccurrenceWasPerformed() { return isSelectNextOccurrenceWasPerformed; } @Override public void setSelectNextOccurrenceWasPerformed() { isSelectNextOccurrenceWasPerformed = true; isFindWasPerformed = false; } @Override public FindModel getFindNextModel() { return myFindNextModel; } @Override public FindModel getFindNextModel(@NotNull final Editor editor) { if (myFindNextModel == null) return null; EditorSearchSession search = EditorSearchSession.get(editor); if (search != null && !isSelectNextOccurrenceWasPerformed) { String textInField = search.getTextInField(); if (!Comparing.equal(textInField, myFindInFileModel.getStringToFind()) && !textInField.isEmpty()) { FindModel patched = new FindModel(); patched.copyFrom(myFindNextModel); patched.setStringToFind(textInField); return patched; } } return myFindNextModel; } @Override public void setFindNextModel(FindModel findNextModel) { myFindNextModel = findNextModel; myProject.getMessageBus().syncPublisher(FIND_MODEL_TOPIC).findNextModelChanged(); } @Override @NotNull public FindResult findString(@NotNull CharSequence text, int offset, @NotNull FindModel model){ return findString(text, offset, model, null); } @NotNull @Override public FindResult findString(@NotNull CharSequence text, int offset, @NotNull FindModel model, @Nullable VirtualFile file) { if (LOG.isDebugEnabled()) { LOG.debug("offset="+offset); LOG.debug("textlength="+text.length()); LOG.debug(model.toString()); } return findStringLoop(text, offset, model, file, getFindContextPredicate(model, file, text)); } private FindResult findStringLoop(CharSequence text, int offset, FindModel model, VirtualFile file, @Nullable Predicate<? 
super FindResult> filter) { final char[] textArray = CharArrayUtil.fromSequenceWithoutCopying(text); while(true) { FindResult result = doFindString(text, textArray, offset, model, file); if (filter == null || filter.apply(result)) { if (!model.isWholeWordsOnly()) { return result; } if (!result.isStringFound()) { return result; } if (isWholeWord(text, result.getStartOffset(), result.getEndOffset())) { return result; } } offset = model.isForward() ? result.getStartOffset() + 1 : result.getEndOffset() - 1; if (offset > text.length() || offset < 0) return NOT_FOUND_RESULT; } } private class FindExceptCommentsOrLiteralsData implements Predicate<FindResult> { private final VirtualFile myFile; private final FindModel myFindModel; private final TreeMap<Integer, Integer> mySkipRangesSet; private final CharSequence myText; private FindExceptCommentsOrLiteralsData(VirtualFile file, FindModel model, CharSequence text) { myFile = file; myFindModel = model.clone(); myText = ImmutableCharSequence.asImmutable(text); TreeMap<Integer, Integer> result = new TreeMap<>(); if (model.isExceptComments() || model.isExceptCommentsAndStringLiterals()) { addRanges(file, model, text, result, FindModel.SearchContext.IN_COMMENTS); } if (model.isExceptStringLiterals() || model.isExceptCommentsAndStringLiterals()) { addRanges(file, model, text, result, FindModel.SearchContext.IN_STRING_LITERALS); } mySkipRangesSet = result; } private void addRanges(VirtualFile file, FindModel model, CharSequence text, TreeMap<Integer, Integer> result, FindModel.SearchContext searchContext) { FindModel clonedModel = model.clone(); clonedModel.setSearchContext(searchContext); clonedModel.setForward(true); int offset = 0; while(true) { FindResult customResult = findStringLoop(text, offset, clonedModel, file, null); if (!customResult.isStringFound()) break; result.put(customResult.getStartOffset(), customResult.getEndOffset()); offset = Math.max(customResult.getEndOffset(), offset + 1); // avoid loop for zero size reg 
exps matches if (offset >= text.length()) break; } } boolean isAcceptableFor(FindModel model, VirtualFile file, CharSequence text) { return Comparing.equal(myFile, file) && myFindModel.equals(model) && myText.length() == text.length() ; } @Override public boolean apply(@Nullable FindResult input) { if (input == null || !input.isStringFound()) return true; NavigableMap<Integer, Integer> map = mySkipRangesSet.headMap(input.getStartOffset(), true); for(Map.Entry<Integer, Integer> e:map.descendingMap().entrySet()) { // [e.key, e.value] intersect with [input.start, input.end] if (e.getKey() <= input.getStartOffset() && (input.getStartOffset() <= e.getValue() || e.getValue() >= input.getEndOffset())) return false; if (e.getValue() <= input.getStartOffset()) break; } return true; } } private static final Key<FindExceptCommentsOrLiteralsData> ourExceptCommentsOrLiteralsDataKey = Key.create("except.comments.literals.search.data"); private Predicate<FindResult> getFindContextPredicate(@NotNull FindModel model, VirtualFile file, CharSequence text) { if (file == null) return null; FindModel.SearchContext context = model.getSearchContext(); if( context == FindModel.SearchContext.ANY || context == FindModel.SearchContext.IN_COMMENTS || context == FindModel.SearchContext.IN_STRING_LITERALS) { return null; } synchronized (model) { FindExceptCommentsOrLiteralsData data = model.getUserData(ourExceptCommentsOrLiteralsDataKey); if (data == null || !data.isAcceptableFor(model, file, text)) { model.putUserData(ourExceptCommentsOrLiteralsDataKey, data = new FindExceptCommentsOrLiteralsData(file, model, text)); } return data; } } @Override public int showMalformedReplacementPrompt(@NotNull FindModel model, String title, MalformedReplacementStringException exception) { return showPromptDialogImpl(model, title, exception); } @Override public FindModel getPreviousFindModel() { return myPreviousFindModel; } @Override public void setPreviousFindModel(FindModel previousFindModel) { 
myPreviousFindModel = previousFindModel; } private static boolean isWholeWord(CharSequence text, int startOffset, int endOffset) { boolean isWordStart; if (startOffset != 0) { boolean previousCharacterIsIdentifier = Character.isJavaIdentifierPart(text.charAt(startOffset - 1)) && (startOffset <= 1 || text.charAt(startOffset - 2) != '\\'); boolean previousCharacterIsSameAsNext = text.charAt(startOffset - 1) == text.charAt(startOffset); boolean firstCharacterIsIdentifier = Character.isJavaIdentifierPart(text.charAt(startOffset)); isWordStart = firstCharacterIsIdentifier ? !previousCharacterIsIdentifier : !previousCharacterIsSameAsNext; } else { isWordStart = true; } boolean isWordEnd; if (endOffset != text.length()) { boolean nextCharacterIsIdentifier = Character.isJavaIdentifierPart(text.charAt(endOffset)); boolean nextCharacterIsSameAsPrevious = endOffset > 0 && text.charAt(endOffset) == text.charAt(endOffset - 1); boolean lastSearchedCharacterIsIdentifier = endOffset > 0 && Character.isJavaIdentifierPart(text.charAt(endOffset - 1)); isWordEnd = lastSearchedCharacterIsIdentifier ? 
!nextCharacterIsIdentifier : !nextCharacterIsSameAsPrevious; } else { isWordEnd = true; } return isWordStart && isWordEnd; } @NotNull private static FindModel normalizeIfMultilined(@NotNull FindModel findmodel) { if (findmodel.isMultiline()) { final FindModel model = new FindModel(); model.copyFrom(findmodel); final String s = model.getStringToFind(); String newStringToFind; if (findmodel.isRegularExpressions()) { newStringToFind = StringUtil.replace(s, "\\n", "\n"); // temporary convert back escaped symbols newStringToFind = newStringToFind.replaceAll( "\n", "\\\\n\\\\s*"); // add \\s* for convenience } else { newStringToFind = StringUtil.escapeToRegexp(s); newStringToFind = newStringToFind.replaceAll("\\\\n\\s*", "\\\\n\\\\s*"); model.setRegularExpressions(true); } model.setStringToFind(newStringToFind); return model; } return findmodel; } @NotNull private FindResult doFindString(@NotNull CharSequence text, char @Nullable [] textArray, int offset, @NotNull FindModel findmodel, @Nullable VirtualFile file) { FindModel model = normalizeIfMultilined(findmodel); String toFind = model.getStringToFind(); if (toFind.isEmpty()){ return NOT_FOUND_RESULT; } if (model.isInCommentsOnly() || model.isInStringLiteralsOnly()) { if (file == null) return NOT_FOUND_RESULT; return findInCommentsAndLiterals(text, textArray, offset, model, file); } if (model.isRegularExpressions()){ return findStringByRegularExpression(text, offset, model, file); } final StringSearcher searcher = createStringSearcher(model); int index; if (model.isForward()){ final int res = searcher.scan(text, textArray, offset, text.length()); index = res < 0 ? -1 : res; } else { index = offset == 0 ? 
-1 : searcher.scan(text, textArray, 0, offset-1); } if (index < 0){ return NOT_FOUND_RESULT; } return new FindResultImpl(index, index + toFind.length()); } @NotNull private static StringSearcher createStringSearcher(@NotNull FindModel model) { return new StringSearcher(model.getStringToFind(), model.isCaseSensitive(), model.isForward()); } static void clearPreviousFindData(FindModel model) { synchronized (model) { model.putUserData(ourCommentsLiteralsSearchDataKey, null); model.putUserData(ourExceptCommentsOrLiteralsDataKey, null); } } private static class CommentsLiteralsSearchData { final VirtualFile lastFile; int startOffset; final SyntaxHighlighterOverEditorHighlighter highlighter; TokenSet tokensOfInterest; final StringSearcher searcher; final Matcher matcher; final Set<Language> relevantLanguages; final FindModel model; CommentsLiteralsSearchData(VirtualFile lastFile, Set<Language> relevantLanguages, SyntaxHighlighterOverEditorHighlighter highlighter, TokenSet tokensOfInterest, StringSearcher searcher, Matcher matcher, FindModel model) { this.lastFile = lastFile; this.highlighter = highlighter; this.tokensOfInterest = tokensOfInterest; this.searcher = searcher; this.matcher = matcher; this.relevantLanguages = relevantLanguages; this.model = model; } } private static final Key<CommentsLiteralsSearchData> ourCommentsLiteralsSearchDataKey = Key.create("comments.literals.search.data"); @NotNull private FindResult findInCommentsAndLiterals(@NotNull CharSequence text, char[] textArray, int offset, @NotNull FindModel model, @NotNull final VirtualFile file) { synchronized (model) { FileType ftype = file.getFileType(); Language lang = LanguageUtil.getLanguageForPsi(myProject, file); CommentsLiteralsSearchData data = model.getUserData(ourCommentsLiteralsSearchDataKey); if (data == null || !Comparing.equal(data.lastFile, file) || !data.model.equals(model)) { SyntaxHighlighter highlighter = getHighlighter(file, lang); if (highlighter == null) { // no syntax highlighter 
-> no search return NOT_FOUND_RESULT; } TokenSet tokensOfInterest = TokenSet.EMPTY; Set<Language> relevantLanguages; if (lang != null) { final Language finalLang = lang; relevantLanguages = ReadAction.compute(() -> { THashSet<Language> result = new THashSet<>(); FileViewProvider viewProvider = PsiManager.getInstance(myProject).findViewProvider(file); if (viewProvider != null) { result.addAll(viewProvider.getLanguages()); } if (result.isEmpty()) { result.add(finalLang); } return result; }); for (Language relevantLanguage : relevantLanguages) { tokensOfInterest = addTokenTypesForLanguage(model, relevantLanguage, tokensOfInterest); } } else { relevantLanguages = new HashSet<>(); if (ftype instanceof AbstractFileType) { if (model.isInCommentsOnly()) { tokensOfInterest = TokenSet.create(CustomHighlighterTokenType.LINE_COMMENT, CustomHighlighterTokenType.MULTI_LINE_COMMENT); } if (model.isInStringLiteralsOnly()) { tokensOfInterest = TokenSet.orSet(tokensOfInterest, TokenSet .create(CustomHighlighterTokenType.STRING, CustomHighlighterTokenType.SINGLE_QUOTED_STRING)); } } } Matcher matcher = model.isRegularExpressions() ? compileRegExp(model, "") : null; StringSearcher searcher = matcher != null ? null : new StringSearcher(model.getStringToFind(), model.isCaseSensitive(), true); LayeredLexer.ourDisableLayersFlag.set(Boolean.TRUE); try { SyntaxHighlighterOverEditorHighlighter highlighterAdapter = new SyntaxHighlighterOverEditorHighlighter(highlighter, file, myProject); data = new CommentsLiteralsSearchData(file, relevantLanguages, highlighterAdapter, tokensOfInterest, searcher, matcher, model.clone()); data.highlighter.restart(text); } finally { LayeredLexer.ourDisableLayersFlag.set(null); } model.putUserData(ourCommentsLiteralsSearchDataKey, data); } int initialStartOffset = model.isForward() && data.startOffset < offset ? 
data.startOffset : 0; data.highlighter.resetPosition(initialStartOffset); final Lexer lexer = data.highlighter.getHighlightingLexer(); IElementType tokenType; TokenSet tokens = data.tokensOfInterest; int lastGoodOffset = 0; boolean scanningForward = model.isForward(); FindResultImpl prevFindResult = NOT_FOUND_RESULT; while ((tokenType = lexer.getTokenType()) != null) { if (lexer.getState() == 0) lastGoodOffset = lexer.getTokenStart(); final TextAttributesKey[] keys = data.highlighter.getTokenHighlights(tokenType); if (tokens.contains(tokenType) || model.isInStringLiteralsOnly() && ChunkExtractor.isHighlightedAsString(keys) || model.isInCommentsOnly() && ChunkExtractor.isHighlightedAsComment(keys) ) { int start = lexer.getTokenStart(); int end = lexer.getTokenEnd(); if (model.isInStringLiteralsOnly()) { // skip literal quotes itself from matching char c = text.charAt(start); if (c == '"' || c == '\'') { while (start < end && c == text.charAt(start)) { ++start; if (c == text.charAt(end - 1) && start < end) --end; } } } final int tokenContentStart = start; while (true) { FindResultImpl findResult = null; if (data.searcher != null) { int matchStart = data.searcher.scan(text, textArray, start, end); if (matchStart != -1 && matchStart >= start) { final int matchEnd = matchStart + model.getStringToFind().length(); if (matchStart >= offset || !scanningForward) findResult = new FindResultImpl(matchStart, matchEnd); else { start = matchEnd; continue; } } } else if (start <= end) { data.matcher.reset(StringPattern.newBombedCharSequence(text.subSequence(tokenContentStart, end))); data.matcher.region(start - tokenContentStart, end - tokenContentStart); data.matcher.useTransparentBounds(true); if (data.matcher.find()) { final int matchEnd = tokenContentStart + data.matcher.end(); int matchStart = tokenContentStart + data.matcher.start(); if (matchStart >= offset || !scanningForward) { findResult = new FindResultImpl(matchStart, matchEnd); } else { int diff = 0; if (start == end 
|| start == matchEnd) { diff = 1; } start = matchEnd + diff; continue; } } } if (findResult != null) { if (scanningForward) { data.startOffset = lastGoodOffset; return findResult; } else { if (findResult.getEndOffset() >= offset) return prevFindResult; prevFindResult = findResult; start = findResult.getEndOffset(); continue; } } break; } } else { Language tokenLang = tokenType.getLanguage(); if (tokenLang != lang && tokenLang != Language.ANY && !data.relevantLanguages.contains(tokenLang)) { tokens = addTokenTypesForLanguage(model, tokenLang, tokens); data.tokensOfInterest = tokens; data.relevantLanguages.add(tokenLang); } } lexer.advance(); } return prevFindResult; } } private static TokenSet addTokenTypesForLanguage(FindModel model, Language lang, TokenSet tokensOfInterest) { ParserDefinition definition = LanguageParserDefinitions.INSTANCE.forLanguage(lang); if (definition != null) { tokensOfInterest = TokenSet.orSet(tokensOfInterest, model.isInCommentsOnly() ? definition.getCommentTokens(): TokenSet.EMPTY); tokensOfInterest = TokenSet.orSet(tokensOfInterest, model.isInStringLiteralsOnly() ? definition.getStringLiteralElements() : TokenSet.EMPTY); } return tokensOfInterest; } private static SyntaxHighlighter getHighlighter(VirtualFile file, @Nullable Language lang) { SyntaxHighlighter syntaxHighlighter = lang != null ? 
SyntaxHighlighterFactory.getSyntaxHighlighter(lang, null, file) : null; if (lang == null || syntaxHighlighter instanceof PlainSyntaxHighlighter) { syntaxHighlighter = SyntaxHighlighterFactory.getSyntaxHighlighter(file.getFileType(), null, file); } return syntaxHighlighter; } private FindResult findStringByRegularExpression(CharSequence text, int startOffset, FindModel model, VirtualFile file) { Matcher matcher = compileRegExp(model, text); if (matcher == null) { return NOT_FOUND_RESULT; } try { if (model.isForward()) { if (matcher.find(startOffset)) { if (matcher.end() <= text.length()) { return new FindResultImpl(matcher.start(), matcher.end()); } } return NOT_FOUND_RESULT; } else { int start = -1; int end = -1; while (matcher.find() && matcher.end() < startOffset) { start = matcher.start(); end = matcher.end(); } if (start < 0) { return NOT_FOUND_RESULT; } return new FindResultImpl(start, end); } } catch (StackOverflowError soe) { String stringToFind = model.getStringToFind(); if (!ApplicationManager.getApplication().isHeadlessEnvironment() && ourReportedPatterns.put(stringToFind.hashCode(), Boolean.TRUE) == null) { String content = stringToFind + " produced stack overflow when matching content of the file"; LOG.info(content); GROUP.createNotification("Regular expression failed to match", content + " " + file.getPath(), NotificationType.ERROR, null ).notify(myProject); } return NOT_FOUND_RESULT; } } private static final IntObjectMap<Boolean> ourReportedPatterns = ContainerUtil.createConcurrentIntObjectMap(); private static Matcher compileRegExp(FindModel model, CharSequence text) { Pattern pattern = model.compileRegExp(); return pattern == null ? 
null : pattern.matcher( StringPattern.newBombedCharSequence(text) ); } @Override public String getStringToReplace(@NotNull String foundString, @NotNull FindModel model, int startOffset, @NotNull CharSequence documentText) throws MalformedReplacementStringException{ String toReplace = model.getStringToReplace(); if (model.isRegularExpressions()) { return getStringToReplaceByRegexp(model, documentText, startOffset); } if (model.isPreserveCase()) { return replaceWithCaseRespect (toReplace, foundString); } return toReplace; } private static String getStringToReplaceByRegexp(@NotNull final FindModel model, @NotNull CharSequence text, int startOffset) throws MalformedReplacementStringException { Matcher matcher = compileRegexAndFindFirst(model, text, startOffset); return getStringToReplaceByRegexp(model, matcher); } private static String getStringToReplaceByRegexp(@NotNull final FindModel model, Matcher matcher) throws MalformedReplacementStringException{ if (matcher == null) return null; try { String toReplace = model.getStringToReplace(); return new RegExReplacementBuilder(matcher).createReplacement(toReplace); } catch (Exception e) { throw createMalformedReplacementException(model, e); } } private static Matcher compileRegexAndFindFirst(FindModel model, CharSequence text, int startOffset) { model = normalizeIfMultilined(model); Matcher matcher = compileRegExp(model, text); if (model.isForward()){ if (!matcher.find(startOffset)) { return null; } if (matcher.end() > text.length()) { return null; } } else { int start = -1; while(matcher.find() && matcher.end() < startOffset){ start = matcher.start(); } if (start < 0){ return null; } } return matcher; } private static MalformedReplacementStringException createMalformedReplacementException(FindModel model, Exception e) { return new MalformedReplacementStringException(FindBundle.message("find.replace.invalid.replacement.string", model.getStringToReplace()), e); } private static String replaceWithCaseRespect(String 
toReplace, String foundString) { if (foundString.isEmpty() || toReplace.isEmpty()) return toReplace; StringBuilder buffer = new StringBuilder(); if (Character.isUpperCase(foundString.charAt(0))) { buffer.append(Character.toUpperCase(toReplace.charAt(0))); } else { buffer.append(Character.toLowerCase(toReplace.charAt(0))); } if (toReplace.length() == 1) return buffer.toString(); if (foundString.length() == 1) { buffer.append(toReplace.substring(1)); return buffer.toString(); } boolean isReplacementLowercase = true; boolean isReplacementUppercase = true; for (int i = 1; i < toReplace.length(); i++) { char replacementChar = toReplace.charAt(i); if (!Character.isLetter(replacementChar)) continue; isReplacementLowercase &= Character.isLowerCase(replacementChar); isReplacementUppercase &= Character.isUpperCase(replacementChar); if (!isReplacementLowercase && !isReplacementUppercase) break; } boolean isTailUpper = true; boolean isTailLower = true; for (int i = 1; i < foundString.length(); i++) { char foundChar = foundString.charAt(i); if (!Character.isLetter(foundChar)) continue; isTailUpper &= Character.isUpperCase(foundChar); isTailLower &= Character.isLowerCase(foundChar); if (!isTailUpper && !isTailLower) break; } if (isTailUpper && (isReplacementLowercase || isReplacementUppercase)) { buffer.append(StringUtil.toUpperCase(toReplace.substring(1))); } else if (isTailLower && (isReplacementLowercase || isReplacementUppercase)) { buffer.append(StringUtil.toLowerCase(toReplace.substring(1))); } else { buffer.append(toReplace.substring(1)); } return buffer.toString(); } @Override public boolean canFindUsages(@NotNull PsiElement element) { return element.isValid() && myFindUsagesManager.canFindUsages(element); } @Override public void findUsages(@NotNull PsiElement element) { findUsages(element, false); } @Override public void findUsagesInScope(@NotNull PsiElement element, @NotNull SearchScope searchScope) { myFindUsagesManager.findUsages(element, null, null, false, 
searchScope); } @Override public void findUsages(@NotNull PsiElement element, boolean showDialog) { myFindUsagesManager.findUsages(element, null, null, showDialog, null); } @Override public void showSettingsAndFindUsages(NavigationItem @NotNull [] targets) { FindUsagesManager.showSettingsAndFindUsages(targets); } @Override public void clearFindingNextUsageInFile() { myFindUsagesManager.clearFindingNextUsageInFile(); } @Override public void findUsagesInEditor(@NotNull PsiElement element, @NotNull FileEditor fileEditor) { if (fileEditor instanceof TextEditor) { TextEditor textEditor = (TextEditor)fileEditor; Editor editor = textEditor.getEditor(); Document document = editor.getDocument(); PsiFile psiFile = PsiDocumentManager.getInstance(myProject).getPsiFile(document); myFindUsagesManager.findUsages(element, psiFile, fileEditor, false, null); } } private static boolean tryToFindNextUsageViaEditorSearchComponent(Editor editor, SearchResults.Direction forwardOrBackward) { EditorSearchSession search = EditorSearchSession.get(editor); if (search != null && search.hasMatches()) { if (!search.isSearchInProgress()) { if (forwardOrBackward == SearchResults.Direction.UP) { search.searchBackward(); } else { search.searchForward(); } } return true; } return false; } @Override public boolean findNextUsageInEditor(@NotNull FileEditor fileEditor) { if (!(fileEditor instanceof TextEditor)) return false; return findNextUsageInFile(((TextEditor) fileEditor).getEditor(), SearchResults.Direction.DOWN); } @Override public boolean findNextUsageInEditor(@NotNull Editor editor) { return findNextUsageInFile(editor, SearchResults.Direction.DOWN); } @Override public boolean findPreviousUsageInEditor(@NotNull Editor editor) { return findNextUsageInFile(editor, SearchResults.Direction.UP); } private boolean findNextUsageInFile(@NotNull Editor editor, @NotNull SearchResults.Direction direction) { editor.getCaretModel().removeSecondaryCarets(); if 
(tryToFindNextUsageViaEditorSearchComponent(editor, direction)) { return true; } RangeHighlighter[] highlighters = ((HighlightManagerImpl)HighlightManager.getInstance(myProject)).getHighlighters(editor); if (highlighters.length > 0) { return highlightNextHighlighter(highlighters, editor, editor.getCaretModel().getOffset(), direction == SearchResults.Direction.DOWN, false); } if (direction == SearchResults.Direction.DOWN) { return myFindUsagesManager.findNextUsageInFile(editor); } return myFindUsagesManager.findPreviousUsageInFile(editor); } @Override public boolean findPreviousUsageInEditor(@NotNull FileEditor fileEditor) { if (!(fileEditor instanceof TextEditor)) return false; return findNextUsageInFile(((TextEditor) fileEditor).getEditor(), SearchResults.Direction.UP); } private static boolean highlightNextHighlighter(RangeHighlighter[] highlighters, Editor editor, int offset, boolean isForward, boolean secondPass) { RangeHighlighter highlighterToSelect = null; Object wasNotFound = editor.getUserData(HIGHLIGHTER_WAS_NOT_FOUND_KEY); for (RangeHighlighter highlighter : highlighters) { int start = highlighter.getStartOffset(); int end = highlighter.getEndOffset(); if (highlighter.isValid() && start < end) { if (isForward && (start > offset || start == offset && secondPass)) { if (highlighterToSelect == null || highlighterToSelect.getStartOffset() > start) highlighterToSelect = highlighter; } if (!isForward && (end < offset || end == offset && secondPass)) { if (highlighterToSelect == null || highlighterToSelect.getEndOffset() < end) highlighterToSelect = highlighter; } } } if (highlighterToSelect != null) { expandFoldRegionsIfNecessary(editor, highlighterToSelect.getStartOffset(), highlighterToSelect.getEndOffset()); editor.getSelectionModel().setSelection(highlighterToSelect.getStartOffset(), highlighterToSelect.getEndOffset()); editor.getCaretModel().moveToOffset(highlighterToSelect.getStartOffset()); ScrollType scrollType; if (secondPass) { scrollType = isForward 
? ScrollType.CENTER_UP : ScrollType.CENTER_DOWN; } else { scrollType = isForward ? ScrollType.CENTER_DOWN : ScrollType.CENTER_UP; } editor.getScrollingModel().scrollToCaret(scrollType); editor.putUserData(HIGHLIGHTER_WAS_NOT_FOUND_KEY, null); return true; } if (wasNotFound == null) { editor.putUserData(HIGHLIGHTER_WAS_NOT_FOUND_KEY, Boolean.TRUE); String message = FindBundle.message("find.highlight.no.more.highlights.found"); if (isForward) { AnAction action=ActionManager.getInstance().getAction(IdeActions.ACTION_FIND_NEXT); String shortcutsText=KeymapUtil.getFirstKeyboardShortcutText(action); if (shortcutsText.isEmpty()) { message = FindBundle.message("find.search.again.from.top.action.message", message); } else { message = FindBundle.message("find.search.again.from.top.hotkey.message", message, shortcutsText); } } else { AnAction action=ActionManager.getInstance().getAction(IdeActions.ACTION_FIND_PREVIOUS); String shortcutsText=KeymapUtil.getFirstKeyboardShortcutText(action); if (shortcutsText.isEmpty()) { message = FindBundle.message("find.search.again.from.bottom.action.message", message); } else { message = FindBundle.message("find.search.again.from.bottom.hotkey.message", message, shortcutsText); } } JComponent component = HintUtil.createInformationLabel(message); final LightweightHint hint = new LightweightHint(component); HintManagerImpl.getInstanceImpl().showEditorHint(hint, editor, HintManager.UNDER, HintManager.HIDE_BY_ANY_KEY | HintManager.HIDE_BY_TEXT_CHANGE | HintManager.HIDE_BY_SCROLLING, 0, false); return true; } if (!secondPass) { offset = isForward ? 
0 : editor.getDocument().getTextLength(); return highlightNextHighlighter(highlighters, editor, offset, isForward, true); } return false; } private static void expandFoldRegionsIfNecessary(@NotNull Editor editor, final int startOffset, int endOffset) { final FoldingModel foldingModel = editor.getFoldingModel(); final FoldRegion[] regions; if (foldingModel instanceof FoldingModelEx) { regions = ((FoldingModelEx)foldingModel).fetchTopLevel(); } else { regions = foldingModel.getAllFoldRegions(); } if (regions == null) { return; } int i = Arrays.binarySearch(regions, null, (o1, o2) -> { // Find the first region that ends after the given start offset if (o1 == null) { return startOffset - o2.getEndOffset(); } return o1.getEndOffset() - startOffset; }); if (i < 0) { i = -i - 1; } else { i++; // Don't expand fold region that ends at the start offset. } if (i >= regions.length) { return; } final List<FoldRegion> toExpand = new ArrayList<>(); for (; i < regions.length; i++) { final FoldRegion region = regions[i]; if (region.getStartOffset() >= endOffset) { break; } if (!region.isExpanded()) { toExpand.add(region); } } if (toExpand.isEmpty()) { return; } foldingModel.runBatchFoldingOperation(() -> { for (FoldRegion region : toExpand) { region.setExpanded(true); } }); } @NotNull public FindUsagesManager getFindUsagesManager() { return myFindUsagesManager; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.query.relnode.visitor; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.apache.calcite.avatica.util.TimeUnitRange; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexDynamicParam; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexLocalRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexVisitorImpl; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.type.SqlTypeFamily; import org.apache.calcite.util.NlsString; import org.apache.kylin.common.util.DateFormat; import org.apache.kylin.common.util.Pair; import org.apache.kylin.metadata.filter.CaseTupleFilter; import org.apache.kylin.metadata.filter.ColumnTupleFilter; import org.apache.kylin.metadata.filter.CompareTupleFilter; import org.apache.kylin.metadata.filter.ConstantTupleFilter; 
import org.apache.kylin.metadata.filter.DynamicTupleFilter; import org.apache.kylin.metadata.filter.ExtractTupleFilter; import org.apache.kylin.metadata.filter.LogicalTupleFilter; import org.apache.kylin.metadata.filter.TupleFilter; import org.apache.kylin.metadata.filter.UnsupportedTupleFilter; import org.apache.kylin.metadata.filter.function.Functions; import org.apache.kylin.metadata.model.TblColRef; import org.apache.kylin.query.relnode.ColumnRowType; import java.math.BigDecimal; import java.util.GregorianCalendar; import java.util.List; import java.util.Map; import java.util.Set; public class TupleFilterVisitor extends RexVisitorImpl<TupleFilter> { final ColumnRowType inputRowType; public TupleFilterVisitor(ColumnRowType inputRowType) { super(true); this.inputRowType = inputRowType; } @Override public TupleFilter visitCall(RexCall call) { TupleFilter filter = null; SqlOperator op = call.getOperator(); switch (op.getKind()) { case AND: filter = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND); break; case OR: filter = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.OR); break; case NOT: filter = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.NOT); break; case EQUALS: filter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ); break; case GREATER_THAN: filter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT); break; case LESS_THAN: filter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LT); break; case GREATER_THAN_OR_EQUAL: filter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GTE); break; case LESS_THAN_OR_EQUAL: filter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.LTE); break; case NOT_EQUALS: filter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.NEQ); break; case IS_NULL: filter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.ISNULL); break; case IS_NOT_NULL: filter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.ISNOTNULL); break; case CAST: case REINTERPRET: // 
NOTE: use child directly break; case CASE: filter = new CaseTupleFilter(); break; case OTHER: if (op.getName().equalsIgnoreCase("extract_date")) { filter = new ExtractTupleFilter(TupleFilter.FilterOperatorEnum.EXTRACT); } else { filter = Functions.getFunctionTupleFilter(op.getName()); } break; case LIKE: case OTHER_FUNCTION: filter = Functions.getFunctionTupleFilter(op.getName()); break; case PLUS: case MINUS: case TIMES: case DIVIDE: TupleFilter f = dealWithTrivialExpr(call); if (f != null) { // is a trivial expr return f; } filter = new UnsupportedTupleFilter(TupleFilter.FilterOperatorEnum.UNSUPPORTED); break; default: filter = new UnsupportedTupleFilter(TupleFilter.FilterOperatorEnum.UNSUPPORTED); } for (RexNode operand : call.operands) { TupleFilter childFilter = operand.accept(this); if (filter == null) { filter = cast(childFilter, call.type); } else { filter.addChild(childFilter); } } if (op.getKind() == SqlKind.OR) { filter = mergeToInClause(filter); } else if (op.getKind() == SqlKind.NOT) { assert (filter.getChildren().size() == 1); filter = filter.getChildren().get(0).reverse(); } return filter; } //KYLIN-2597 - Deal with trivial expression in filters like x = 1 + 2 private TupleFilter dealWithTrivialExpr(RexCall call) { ImmutableList<RexNode> operators = call.operands; if (operators.size() != 2) { return null; } BigDecimal left = null; BigDecimal right = null; for (RexNode rexNode : operators) { if (!(rexNode instanceof RexLiteral)) { return null;// only trivial expr with constants } RexLiteral temp = (RexLiteral) rexNode; if (temp.getType().getFamily() != SqlTypeFamily.NUMERIC || !(temp.getValue() instanceof BigDecimal)) { return null;// only numeric constants now } if (left == null) { left = (BigDecimal) temp.getValue(); } else { right = (BigDecimal) temp.getValue(); } } Preconditions.checkNotNull(left); Preconditions.checkNotNull(right); switch (call.op.getKind()) { case PLUS: return new ConstantTupleFilter(left.add(right).toString()); case MINUS: 
return new ConstantTupleFilter(left.subtract(right).toString()); case TIMES: return new ConstantTupleFilter(left.multiply(right).toString()); case DIVIDE: return new ConstantTupleFilter(left.divide(right).toString()); default: return null; } } private TupleFilter cast(TupleFilter filter, RelDataType type) { if ((filter instanceof ConstantTupleFilter) == false) { return filter; } ConstantTupleFilter constFilter = (ConstantTupleFilter) filter; if (type.getFamily() == SqlTypeFamily.DATE || type.getFamily() == SqlTypeFamily.DATETIME || type.getFamily() == SqlTypeFamily.TIMESTAMP) { List<String> newValues = Lists.newArrayList(); for (Object v : constFilter.getValues()) { if (v == null) newValues.add(null); else newValues.add(String.valueOf(DateFormat.stringToMillis(v.toString()))); } constFilter = new ConstantTupleFilter(newValues); } return constFilter; } @VisibleForTesting static TupleFilter mergeToInClause(TupleFilter filter) { List<? extends TupleFilter> children = filter.getChildren(); if (children.isEmpty()) { return filter; } // key: inColumn // Value: first: inValues // Value: second: dynamicVariables Map<TblColRef, Pair<Set<Object>, Map<String, Object>>> inColumnMap = Maps.newHashMap(); List<TupleFilter> extraFilters = Lists.newLinkedList(); for (TupleFilter child : children) { if (child.getOperator() == TupleFilter.FilterOperatorEnum.EQ) { CompareTupleFilter compFilter = (CompareTupleFilter) child; TblColRef column = compFilter.getColumn(); if (column != null) { Pair<Set<Object>, Map<String, Object>> tmpValue = inColumnMap.get(column); if (tmpValue == null) { Set<Object> inValues = Sets.newHashSet(); Map<String, Object> dynamicVariables = Maps.newHashMap(); tmpValue = new Pair<>(inValues, dynamicVariables); inColumnMap.put(column, tmpValue); } tmpValue.getFirst().addAll(compFilter.getValues()); tmpValue.getSecond().putAll(compFilter.getVariables()); continue; } } extraFilters.add(child); } children.clear(); TupleFilter ret = new 
LogicalTupleFilter(TupleFilter.FilterOperatorEnum.OR); ret.addChildren(extraFilters); for (Map.Entry<TblColRef, Pair<Set<Object>, Map<String, Object>>> entry : inColumnMap.entrySet()) { CompareTupleFilter inFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.IN); inFilter.addChild(new ColumnTupleFilter(entry.getKey())); inFilter.addChild(new ConstantTupleFilter(entry.getValue().getFirst())); inFilter.getVariables().putAll(entry.getValue().getSecond()); ret.addChild(inFilter); } return ret.getChildren().size() == 1 ? ret.getChildren().get(0) : ret; } @Override public TupleFilter visitLocalRef(RexLocalRef localRef) { throw new UnsupportedOperationException("local ref:" + localRef); } @Override public TupleFilter visitInputRef(RexInputRef inputRef) { TblColRef column = inputRowType.getColumnByIndex(inputRef.getIndex()); ColumnTupleFilter filter = new ColumnTupleFilter(column); return filter; } @SuppressWarnings("unused") private String normToTwoDigits(int i) { if (i < 10) return "0" + i; else return "" + i; } @Override public TupleFilter visitLiteral(RexLiteral literal) { String strValue = null; Object literalValue = literal.getValue(); if (literalValue instanceof NlsString) { strValue = ((NlsString) literalValue).getValue(); } else if (literalValue instanceof GregorianCalendar) { GregorianCalendar g = (GregorianCalendar) literalValue; strValue = Long.toString(g.getTimeInMillis()); } else if (literalValue instanceof TimeUnitRange) { // Extract(x from y) in where clause strValue = ((TimeUnitRange) literalValue).name(); } else if (literalValue == null) { strValue = null; } else { strValue = literalValue.toString(); } TupleFilter filter = new ConstantTupleFilter(strValue); return filter; } @Override public TupleFilter visitDynamicParam(RexDynamicParam dynamicParam) { String name = dynamicParam.getName(); TupleFilter filter = new DynamicTupleFilter(name); return filter; } }
/* * Copyright 2002-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.security.web.servletapi; import java.io.IOException; import java.security.Principal; import java.util.List; import javax.servlet.AsyncContext; import javax.servlet.AsyncListener; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.security.authentication.AuthenticationCredentialsNotFoundException; import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.authentication.AuthenticationTrustResolver; import org.springframework.security.authentication.AuthenticationTrustResolverImpl; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.concurrent.DelegatingSecurityContextRunnable; import org.springframework.security.core.Authentication; import org.springframework.security.core.AuthenticationException; import org.springframework.security.core.context.SecurityContext; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.web.AuthenticationEntryPoint; import 
org.springframework.security.web.authentication.logout.LogoutHandler; import org.springframework.util.Assert; /** * Provides integration with the Servlet 3 APIs in addition to the ones found in * {@link HttpServlet25RequestFactory}. The additional methods that are integrated with * can be found below: * * <ul> * <li> {@link HttpServletRequest#authenticate(HttpServletResponse)} - Allows the user to * determine if they are authenticated and if not send the user to the login page. See * {@link #setAuthenticationEntryPoint(AuthenticationEntryPoint)}.</li> * <li> {@link HttpServletRequest#login(String, String)} - Allows the user to authenticate * using the {@link AuthenticationManager}. See * {@link #setAuthenticationManager(AuthenticationManager)}.</li> * <li> {@link HttpServletRequest#logout()} - Allows the user to logout using the * {@link LogoutHandler}s configured in Spring Security. See * {@link #setLogoutHandlers(List)}.</li> * <li> {@link AsyncContext#start(Runnable)} - Automatically copy the * {@link SecurityContext} from the {@link SecurityContextHolder} found on the Thread that * invoked {@link AsyncContext#start(Runnable)} to the Thread that processes the * {@link Runnable}.</li> * </ul> * * @author Rob Winch * * @see SecurityContextHolderAwareRequestFilter * @see HttpServlet25RequestFactory * @see Servlet3SecurityContextHolderAwareRequestWrapper * @see SecurityContextAsyncContext */ final class HttpServlet3RequestFactory implements HttpServletRequestFactory { private Log logger = LogFactory.getLog(getClass()); private final String rolePrefix; private AuthenticationTrustResolver trustResolver = new AuthenticationTrustResolverImpl(); private AuthenticationEntryPoint authenticationEntryPoint; private AuthenticationManager authenticationManager; private List<LogoutHandler> logoutHandlers; HttpServlet3RequestFactory(String rolePrefix) { this.rolePrefix = rolePrefix; } /** * <p> * Sets the {@link AuthenticationEntryPoint} used when integrating * {@link 
HttpServletRequest} with Servlet 3 APIs. Specifically, it will be used when * {@link HttpServletRequest#authenticate(HttpServletResponse)} is called and the user * is not authenticated. * </p> * <p> * If the value is null (default), then the default container behavior will be be * retained when invoking {@link HttpServletRequest#authenticate(HttpServletResponse)} * . * </p> * @param authenticationEntryPoint the {@link AuthenticationEntryPoint} to use when * invoking {@link HttpServletRequest#authenticate(HttpServletResponse)} if the user * is not authenticated. */ public void setAuthenticationEntryPoint( AuthenticationEntryPoint authenticationEntryPoint) { this.authenticationEntryPoint = authenticationEntryPoint; } /** * <p> * Sets the {@link AuthenticationManager} used when integrating * {@link HttpServletRequest} with Servlet 3 APIs. Specifically, it will be used when * {@link HttpServletRequest#login(String, String)} is invoked to determine if the * user is authenticated. * </p> * <p> * If the value is null (default), then the default container behavior will be * retained when invoking {@link HttpServletRequest#login(String, String)}. * </p> * * @param authenticationManager the {@link AuthenticationManager} to use when invoking * {@link HttpServletRequest#login(String, String)} */ public void setAuthenticationManager(AuthenticationManager authenticationManager) { this.authenticationManager = authenticationManager; } /** * <p> * Sets the {@link LogoutHandler}s used when integrating with * {@link HttpServletRequest} with Servlet 3 APIs. Specifically it will be used when * {@link HttpServletRequest#logout()} is invoked in order to log the user out. So * long as the {@link LogoutHandler}s do not commit the {@link HttpServletResponse} * (expected), then the user is in charge of handling the response. * </p> * <p> * If the value is null (default), the default container behavior will be retained * when invoking {@link HttpServletRequest#logout()}. 
* </p> * * @param logoutHandlers the {@link List<LogoutHandler>}s when invoking * {@link HttpServletRequest#logout()}. */ public void setLogoutHandlers(List<LogoutHandler> logoutHandlers) { this.logoutHandlers = logoutHandlers; } /** * Sets the {@link AuthenticationTrustResolver} to be used. The default is * {@link AuthenticationTrustResolverImpl}. * * @param trustResolver the {@link AuthenticationTrustResolver} to use. Cannot be * null. */ public void setTrustResolver(AuthenticationTrustResolver trustResolver) { Assert.notNull(trustResolver, "trustResolver cannot be null"); this.trustResolver = trustResolver; } public HttpServletRequest create(HttpServletRequest request, HttpServletResponse response) { return new Servlet3SecurityContextHolderAwareRequestWrapper(request, rolePrefix, response); } private class Servlet3SecurityContextHolderAwareRequestWrapper extends SecurityContextHolderAwareRequestWrapper { private final HttpServletResponse response; public Servlet3SecurityContextHolderAwareRequestWrapper( HttpServletRequest request, String rolePrefix, HttpServletResponse response) { super(request, trustResolver, rolePrefix); this.response = response; } public AsyncContext getAsyncContext() { AsyncContext asyncContext = super.getAsyncContext(); return new SecurityContextAsyncContext(asyncContext); } public AsyncContext startAsync() { AsyncContext startAsync = super.startAsync(); return new SecurityContextAsyncContext(startAsync); } public AsyncContext startAsync(ServletRequest servletRequest, ServletResponse servletResponse) throws IllegalStateException { AsyncContext startAsync = super.startAsync(servletRequest, servletResponse); return new SecurityContextAsyncContext(startAsync); } public boolean authenticate(HttpServletResponse response) throws IOException, ServletException { AuthenticationEntryPoint entryPoint = authenticationEntryPoint; if (entryPoint == null) { logger.debug("authenticationEntryPoint is null, so allowing original HttpServletRequest to handle 
authenticate"); return super.authenticate(response); } if (isAuthenticated()) { return true; } entryPoint.commence(this, response, new AuthenticationCredentialsNotFoundException( "User is not Authenticated")); return false; } public void login(String username, String password) throws ServletException { if (isAuthenticated()) { throw new ServletException("Cannot perform login for '" + username + "' already authenticated as '" + getRemoteUser() + "'"); } AuthenticationManager authManager = authenticationManager; if (authManager == null) { logger.debug("authenticationManager is null, so allowing original HttpServletRequest to handle login"); super.login(username, password); return; } Authentication authentication; try { authentication = authManager .authenticate(new UsernamePasswordAuthenticationToken(username, password)); } catch (AuthenticationException loginFailed) { SecurityContextHolder.clearContext(); throw new ServletException(loginFailed.getMessage(), loginFailed); } SecurityContextHolder.getContext().setAuthentication(authentication); } public void logout() throws ServletException { List<LogoutHandler> handlers = logoutHandlers; if (handlers == null) { logger.debug("logoutHandlers is null, so allowing original HttpServletRequest to handle logout"); super.logout(); return; } Authentication authentication = SecurityContextHolder.getContext() .getAuthentication(); for (LogoutHandler logoutHandler : handlers) { logoutHandler.logout(this, response, authentication); } } private boolean isAuthenticated() { Principal userPrincipal = getUserPrincipal(); return userPrincipal != null; } } private static class SecurityContextAsyncContext implements AsyncContext { private final AsyncContext asyncContext; public SecurityContextAsyncContext(AsyncContext asyncContext) { this.asyncContext = asyncContext; } public ServletRequest getRequest() { return asyncContext.getRequest(); } public ServletResponse getResponse() { return asyncContext.getResponse(); } public boolean 
hasOriginalRequestAndResponse() { return asyncContext.hasOriginalRequestAndResponse(); } public void dispatch() { asyncContext.dispatch(); } public void dispatch(String path) { asyncContext.dispatch(path); } public void dispatch(ServletContext context, String path) { asyncContext.dispatch(context, path); } public void complete() { asyncContext.complete(); } public void start(Runnable run) { asyncContext.start(new DelegatingSecurityContextRunnable(run)); } public void addListener(AsyncListener listener) { asyncContext.addListener(listener); } public void addListener(AsyncListener listener, ServletRequest request, ServletResponse response) { asyncContext.addListener(listener, request, response); } public <T extends AsyncListener> T createListener(Class<T> clazz) throws ServletException { return asyncContext.createListener(clazz); } public long getTimeout() { return asyncContext.getTimeout(); } public void setTimeout(long timeout) { asyncContext.setTimeout(timeout); } } }
/*
Copyright 2011-2021 Frederic Langlet
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
you may obtain a copy of the License at

                http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package kanzi.bitstream;

import java.io.IOException;
import java.io.InputStream;
import kanzi.Memory;
import kanzi.BitStreamException;
import kanzi.InputBitStream;


/**
 * Buffered big-endian bit reader over an {@link InputStream}.
 *
 * <p>Bytes are pulled from the stream into an internal byte buffer, then
 * consumed 64 bits at a time into the {@code current} accumulator. The
 * invariant shared by all read methods is: the low {@code availBits} bits of
 * the data most recently pulled into {@code current} have not yet been
 * consumed. Not thread-safe.
 */
public final class DefaultInputBitStream implements InputBitStream
{
   private final InputStream is;    // underlying source of bytes
   private final byte[] buffer;     // internal read buffer (length is a multiple of 8)
   private int position;            // index of current byte (consumed if bitIndex == -1)
   private int availBits;           // bits not consumed in current
   private long read;               // bits accounted for from buffers already retired (see read())
   private boolean closed;
   private int maxPosition;         // index of last valid byte in buffer, -1 when empty
   private long current;            // up to 64 bits staged for consumption, big-endian order

   /**
    * Creates a bit stream reading from {@code is}.
    *
    * @param is the source stream; must not be null
    * @param bufferSize internal buffer size in bytes; must be in [1024, 1&lt;&lt;28]
    *        and a multiple of 8 (pullCurrent() relies on 8-byte alignment)
    * @throws NullPointerException if {@code is} is null
    * @throws IllegalArgumentException if {@code bufferSize} is out of range or misaligned
    */
   public DefaultInputBitStream(InputStream is, int bufferSize)
   {
      if (is == null)
         throw new NullPointerException("Invalid null input stream parameter");

      if (bufferSize < 1024)
         throw new IllegalArgumentException("Invalid buffer size (must be at least 1024)");

      if (bufferSize > 1<<28)
         throw new IllegalArgumentException("Invalid buffer size (must be at most 268435456)");

      if ((bufferSize & 7) != 0)
         throw new IllegalArgumentException("Invalid buffer size (must be a multiple of 8)");

      this.is = is;
      this.buffer = new byte[bufferSize];
      this.availBits = 0;      // nothing staged yet; first read triggers pullCurrent()
      this.maxPosition = -1;   // buffer starts empty
   }


   // Return 1 or 0. Trigger exception if stream is closed
   @Override
   public int readBit() throws BitStreamException
   {
      if (this.availBits == 0)
         this.pullCurrent(); // Triggers an exception if stream is closed

      this.availBits--;
      // Next unconsumed bit is the lowest bit above the remaining availBits
      return (int) (this.current >> this.availBits) & 1;
   }


   /**
    * Refills the internal buffer with up to {@code count} bytes from the stream.
    * Also folds the bits of the buffer being retired into {@code this.read}
    * (in the finally block, so accounting stays correct even on EOF/error).
    *
    * @param count maximum number of bytes to read; 0 is a no-op returning 0
    * @return the number of bytes actually read (&gt; 0)
    * @throws BitStreamException if the stream is closed, exhausted, or an I/O error occurs
    */
   private int readFromInputStream(int count) throws BitStreamException
   {
      if (this.isClosed() == true)
         throw new BitStreamException("Stream closed", BitStreamException.STREAM_CLOSED);

      if (count == 0)
         return 0;

      int size = -1;

      try
      {
         size = this.is.read(this.buffer, 0, count);

         if (size <= 0)
         {
            throw new BitStreamException("No more data to read in the bitstream",
               BitStreamException.END_OF_STREAM);
         }

         return size;
      }
      catch (IOException e)
      {
         throw new BitStreamException(e.getMessage(), BitStreamException.END_OF_STREAM);
      }
      finally
      {
         // Retire the old buffer: credit its bits to 'read' and reset the cursor.
         this.position = 0;
         this.read += (((long) this.maxPosition+1) << 3);
         this.maxPosition = (size <= 0) ? -1 : size - 1;
      }
   }


   // Return value of 'count' next bits as a long. Trigger exception if stream is closed
   @Override
   public long readBits(int count) throws BitStreamException
   {
      // ((count-1) & -64) != 0 rejects anything outside [1..64] in one test
      if (((count-1) & -64) != 0)
         throw new IllegalArgumentException("Invalid bit count: "+count+" (must be in [1..64])");

      if (count <= this.availBits)
      {
         // Enough spots available in 'current'
         this.availBits -= count;
         // (-1L >>> -count) masks the low 'count' bits (shift uses count mod 64)
         return (this.current >>> this.availBits) & (-1L >>> -count);
      }

      // Not enough spots available in 'current': take the leftover bits,
      // refill 'current', then take the remaining 'count' bits from it.
      count -= this.availBits;
      final long res = this.current & ((1L << this.availBits) - 1);
      this.pullCurrent();
      this.availBits -= count;
      return (res << count) | (this.current >>> this.availBits);
   }


   /**
    * Reads {@code count} bits into {@code bits} starting at byte {@code start}.
    * Uses bulk array copies when the cursor is byte-aligned; otherwise shifts
    * 64-bit words. A trailing partial byte is left-aligned in the last byte.
    *
    * @param bits destination array
    * @param start first byte index written in {@code bits}
    * @param count number of bits to read; must fit in {@code bits.length - start} bytes
    * @return {@code count}
    * @throws BitStreamException if the stream is closed or exhausted
    * @throws IllegalArgumentException if {@code count} is negative or too large
    */
   @Override
   public int readBits(byte[] bits, int start, int count) throws BitStreamException
   {
      if (this.isClosed() == true)
         throw new BitStreamException("Stream closed", BitStreamException.STREAM_CLOSED);

      if ((count < 0) || ((count>>3) > bits.length-start))
         throw new IllegalArgumentException("Invalid bit count: "+count+" (must be in [1.." + (((long)(bits.length-start))<<3) + "])");

      if (count == 0)
         return 0;

      int remaining = count;

      // Byte aligned cursor ?
      if ((this.availBits & 7) == 0)
      {
         if (this.availBits == 0)
            this.pullCurrent();

         // Empty this.current
         while ((this.availBits > 0) && (remaining >= 8))
         {
            bits[start] = (byte) this.readBits(8);
            start++;
            remaining -= 8;
         }

         // Copy internal buffer to bits array
         while ((remaining>>3) > this.maxPosition+1-this.position)
         {
            System.arraycopy(this.buffer, this.position, bits, start, this.maxPosition+1-this.position);
            start += (this.maxPosition+1-this.position);
            remaining -= ((this.maxPosition+1-this.position)<<3);
            this.readFromInputStream(this.buffer.length);
         }

         // Bulk-copy whole 8-byte groups still available in the buffer
         final int r = (remaining>>6) << 3;

         if (r > 0)
         {
            System.arraycopy(this.buffer, this.position, bits, start, r);
            this.position += r;
            start += r;
            remaining -= (r<<3);
         }
      }
      else
      {
         // Not byte aligned: emit one 64-bit word at a time, stitching the
         // leftover availBits of 'current' to the top of the next word.
         final int r = 64 - this.availBits;

         while (remaining >= 64)
         {
            final long v = this.current & ((1L<<this.availBits)-1);
            this.pullCurrent();
            this.availBits -= r;
            Memory.BigEndian.writeLong64(bits, start, (v<<r) | (this.current>>>this.availBits));
            start += 8;
            remaining -= 64;
         }
      }

      // Last bytes
      while (remaining >= 8)
      {
         bits[start] = (byte) this.readBits(8);
         start++;
         remaining -= 8;
      }

      if (remaining > 0)
         bits[start] = (byte) (this.readBits(remaining)<<(8-remaining));

      return count;
   }


   // Pull 64 bits of current value from buffer.
   private void pullCurrent()
   {
      if (this.position > this.maxPosition)
         this.readFromInputStream(this.buffer.length);

      if (this.position+7 > this.maxPosition)
      {
         // End of stream: overshoot max position => adjust bit index
         int shift = (this.maxPosition - this.position) << 3;
         this.availBits = shift + 8;
         long val = 0;

         while (this.position <= this.maxPosition)
         {
            val |= (((long) (this.buffer[this.position++] & 0xFF)) << shift);
            shift -= 8;
         }

         this.current = val;
      }
      else
      {
         // Regular processing, buffer length is multiple of 8
         this.current = Memory.BigEndian.readLong64(this.buffer, this.position);
         this.availBits = 64;
         this.position += 8;
      }
   }


   /**
    * Closes this bit stream (the underlying InputStream is NOT closed here).
    * Subsequent readBit()/readBits() calls fail via readFromInputStream().
    */
   @Override
   public void close()
   {
      if (this.isClosed() == true)
         return;

      this.closed = true;

      // Reset fields to force a readFromInputStream() and trigger an exception
      // on readBit() or readBits()
      this.read -= this.availBits;
      this.availBits = 0;
      this.maxPosition = -1;
   }


   // Return number of bits read so far
   // NOTE(review): (this.position<<3) is an int shift; with the maximum 1<<28
   // buffer size, position can transiently equal the buffer length and the
   // shift would overflow int — confirm upstream whether this is reachable.
   @Override
   public long read()
   {
      return this.read + (this.position<<3) - this.availBits;
   }


   /**
    * @return true if at least one more bit can be read; false once the stream
    *         is closed or exhausted (an END_OF_STREAM refill failure is
    *         swallowed and reported as false)
    */
   @Override
   public boolean hasMoreToRead()
   {
      if (this.isClosed() == true)
         return false;

      if ((this.position < this.maxPosition) || (this.availBits > 0))
         return true;

      try
      {
         this.readFromInputStream(this.buffer.length);
      }
      catch (BitStreamException e)
      {
         return false;
      }

      return true;
   }


   public boolean isClosed()
   {
      return this.closed;
   }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.mapreduce.util;

import java.sql.Types;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.apache.phoenix.query.QueryConstants;
import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.util.IndexUtil;
import org.apache.phoenix.util.SchemaUtil;

import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;

/**
 * Gets index column names and their data table equivalents.
 *
 * <p>The constructor walks the index table's columns once per category and
 * builds six parallel lists (index pk / index non-pk / data pk / data non-pk
 * names, plus SQL type names for both sides). List positions correspond
 * across the index/data pairs, so the iteration order below is load-bearing:
 * data-pk-backed columns first, remaining index pk columns second, covered
 * columns last.
 */
public class IndexColumnNames {
    private List<String> dataNonPkColNames = Lists.newArrayList();
    private List<String> dataPkColNames = Lists.newArrayList();
    private List<String> dataColNames;        // dataPkColNames ++ dataNonPkColNames (built at end of ctor)
    protected List<String> dataColSqlTypeNames = Lists.newArrayList();
    private List<String> indexPkColNames = Lists.newArrayList();
    private List<String> indexNonPkColNames = Lists.newArrayList();
    private List<String> indexColNames;       // indexPkColNames ++ indexNonPkColNames (built at end of ctor)
    protected List<String> indexColSqlTypeNames = Lists.newArrayList();
    private PTable pdataTable;
    private PTable pindexTable;

    /**
     * Populates all column-name lists from the given data table / index table pair.
     *
     * @param pdataTable the data (base) table
     * @param pindexTable the index table whose columns are resolved back to the data table
     */
    public IndexColumnNames(final PTable pdataTable, final PTable pindexTable) {
        this.pdataTable = pdataTable;
        this.pindexTable = pindexTable;
        List<PColumn> pindexCols = pindexTable.getColumns();
        List<PColumn> pkColumns = pindexTable.getPKColumns();
        Set<String> indexColsAdded = new HashSet<String>();
        // Skip synthetic leading pk columns (salt byte, view index id, tenant id)
        // so they never appear in the name lists.
        int offset = 0;
        if (pindexTable.getBucketNum() != null) {
            offset++;
        }
        if (pindexTable.getViewIndexId() != null) {
            offset++;
        }
        if (pindexTable.isMultiTenant()) {
            offset++;
        }
        if (offset > 0) {
            pindexCols = pindexCols.subList(offset, pindexCols.size());
            pkColumns = pkColumns.subList(offset, pkColumns.size());
        }

        // first add the data pk columns
        for (PColumn indexCol : pindexCols) {
            if (IndexUtil.isDataPKColumn(indexCol)) {
                String indexColumnName = indexCol.getName().getString();
                PColumn dPkCol = IndexUtil.getDataColumn(pdataTable, indexColumnName);
                dataPkColNames.add(getDataColFullName(dPkCol));
                dataColSqlTypeNames.add(getDataTypeString(dPkCol));
                indexPkColNames.add(indexColumnName);
                indexColSqlTypeNames.add(getDataTypeString(indexCol));
                indexColsAdded.add(indexColumnName);
            }
        }

        // then the rest of the index pk columns
        for (PColumn indexPkCol : pkColumns) {
            String indexColName = indexPkCol.getName().getString();
            if (!indexColsAdded.contains(indexColName)) {
                indexPkColNames.add(indexColName);
                indexColSqlTypeNames.add(getDataTypeString(indexPkCol));
                PColumn dCol = IndexUtil.getDataColumn(pdataTable, indexColName);
                dataNonPkColNames.add(getDataColFullName(dCol));
                dataColSqlTypeNames.add(getDataTypeString(dCol));
                indexColsAdded.add(indexColName);
            }
        }

        // then the covered columns (rest of the columns)
        for (PColumn indexCol : pindexCols) {
            String indexColName = indexCol.getName().getString();
            if (!indexColsAdded.contains(indexColName)) {
                indexNonPkColNames.add(indexColName);
                indexColSqlTypeNames.add(getDataTypeString(indexCol));
                PColumn dCol = IndexUtil.getDataColumn(pdataTable, indexColName);
                dataNonPkColNames.add(getDataColFullName(dCol));
                dataColSqlTypeNames.add(getDataTypeString(dCol));
            }
        }

        indexColNames = Lists.newArrayList(Iterables.concat(indexPkColNames, indexNonPkColNames));
        dataColNames = Lists.newArrayList(Iterables.concat(dataPkColNames, dataNonPkColNames));
    }

    /**
     * Renders the SQL type of a column as a string, including precision/scale
     * for DECIMAL and max length for other sized types (e.g. CHAR(10)).
     */
    private String getDataTypeString(PColumn col) {
        PDataType<?> dataType = col.getDataType();
        switch (dataType.getSqlType()) {
        case Types.DECIMAL:
            String typeStr = dataType.toString();
            if (col.getMaxLength() != null) {
                typeStr += "(" + col.getMaxLength().toString();
                if (col.getScale() != null) {
                    typeStr += "," + col.getScale().toString();
                }
                typeStr += ")";
            }
            return typeStr;
        default:
            if (col.getMaxLength() != null) {
                return String.format("%s(%s)", dataType.toString(), col.getMaxLength());
            }
            return dataType.toString();
        }
    }

    /**
     * @return the column name qualified with its column family (if any),
     *         e.g. "CF.COL", using the standard name separator
     */
    private String getDataColFullName(PColumn dCol) {
        String dColFullName = "";
        if (dCol.getFamilyName() != null) {
            dColFullName += dCol.getFamilyName().getString() + QueryConstants.NAME_SEPARATOR;
        }
        dColFullName += dCol.getName().getString();
        return dColFullName;
    }

    /**
     * Pairs each column name with its SQL type ("\"NAME\" TYPE") for use in
     * dynamic-column query/upsert syntax. The two lists are parallel.
     */
    private List<String> getDynamicCols(List<String> colNames, List<String> colTypes) {
        List<String> dynamicCols = Lists.newArrayListWithCapacity(colNames.size());
        for (int i = 0; i < colNames.size(); i++) {
            String dataColName = colNames.get(i);
            String dataColType = colTypes.get(i);
            String dynamicCol = SchemaUtil.getEscapedFullColumnName(dataColName) + " " + dataColType;
            dynamicCols.add(dynamicCol);
        }
        return dynamicCols;
    }

    /**
     * @return a lazily-transformed view of {@code qualifiedCols} with the
     *         column-family qualifier stripped from each name
     */
    private List<String> getUnqualifiedColNames(List<String> qualifiedCols) {
        return Lists.transform(qualifiedCols, new Function<String, String>() {
            @Override
            public String apply(String qCol) {
                return SchemaUtil.getTableNameFromFullName(qCol, QueryConstants.NAME_SEPARATOR);
            }
        });
    }

    /**
     * Wraps each (escaped) column name in a CAST to the parallel type in
     * {@code castTypes}, e.g. CAST("COL" AS VARCHAR).
     */
    protected List<String> getCastedColumnNames(List<String> colNames, List<String> castTypes) {
        List<String> castColNames = Lists.newArrayListWithCapacity(colNames.size());
        colNames = SchemaUtil.getEscapedFullColumnNames(colNames);
        for (int i = 0; i < colNames.size(); i++) {
            castColNames.add("CAST(" + colNames.get(i) + " AS " + castTypes.get(i) + ")");
        }
        return castColNames;
    }

    /** @return the data table name qualified with its schema */
    public String getQualifiedDataTableName() {
        return SchemaUtil.getQualifiedTableName(pdataTable.getSchemaName().getString(),
                pdataTable.getTableName().getString());
    }

    /** @return the index table name qualified with its schema */
    public String getQualifiedIndexTableName() {
        return SchemaUtil.getQualifiedTableName(pindexTable.getSchemaName().getString(),
                pindexTable.getTableName().getString());
    }

    /**
     * @return the escaped data column names (equivalents for the index columns) along with their
     *         sql type, for use in dynamic column queries/upserts
     */
    public List<String> getDynamicDataCols() {
        // don't want the column family for dynamic columns
        return getDynamicCols(getUnqualifiedDataColNames(), dataColSqlTypeNames);
    }

    /**
     * @return the escaped index column names along with their sql type, for use in dynamic column
     *         queries/upserts
     */
    public List<String> getDynamicIndexCols() {
        // don't want the column family for dynamic columns
        return getDynamicCols(getUnqualifiedIndexColNames(), indexColSqlTypeNames);
    }

    /**
     * @return the corresponding data table column names for the index columns, leading with the
     *         data table pk columns
     */
    public List<String> getDataColNames() {
        return dataColNames;
    }

    /** @return same as getDataColNames, without the column family qualifier */
    public List<String> getUnqualifiedDataColNames() {
        return getUnqualifiedColNames(dataColNames);
    }

    /**
     * @return the corresponding data table column names for the index columns, which are not part
     *         of the data table pk
     */
    public List<String> getDataNonPkColNames() {
        return dataNonPkColNames;
    }

    /**
     * @return the corresponding data table column names for the index columns, which are part of
     *         the data table pk
     */
    public List<String> getDataPkColNames() {
        return dataPkColNames;
    }

    /** @return the index column names, leading with the data table pk columns */
    public List<String> getIndexColNames() {
        return indexColNames;
    }

    /** @return same as getIndexColNames, without the column family qualifier */
    public List<String> getUnqualifiedIndexColNames() {
        return getUnqualifiedColNames(indexColNames);
    }

    /** @return the index pk column names */
    public List<String> getIndexPkColNames() {
        return indexPkColNames;
    }
}
/* * Copyright 2020 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.pmml.models.mining.evaluator; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import org.drools.core.RuleBaseConfiguration; import org.drools.core.impl.InternalKnowledgeBase; import org.drools.core.impl.KnowledgeBaseFactory; import org.drools.core.impl.KnowledgeBaseImpl; import org.drools.core.util.StringUtils; import org.kie.api.KieBase; import org.kie.api.definition.KiePackage; import org.kie.api.pmml.PMML4Result; import org.kie.api.runtime.KieRuntimeFactory; import org.kie.pmml.commons.enums.ResultCode; import org.kie.pmml.commons.exceptions.KieEnumException; import org.kie.pmml.commons.exceptions.KiePMMLException; import org.kie.pmml.commons.exceptions.KiePMMLInternalException; import org.kie.pmml.commons.model.KiePMMLModel; import org.kie.pmml.commons.model.enums.PMML_MODEL; import org.kie.pmml.commons.model.predicates.KiePMMLPredicate; import org.kie.pmml.commons.model.tuples.KiePMMLNameValue; import org.kie.pmml.commons.model.tuples.KiePMMLValueWeight; import org.kie.pmml.evaluator.api.exceptions.KiePMMLModelException; import org.kie.pmml.evaluator.api.executor.PMMLContext; import org.kie.pmml.evaluator.api.executor.PMMLRuntime; import org.kie.pmml.evaluator.core.executor.PMMLModelEvaluator; import 
org.kie.pmml.models.mining.model.KiePMMLMiningModel; import org.kie.pmml.models.mining.model.enums.MULTIPLE_MODEL_METHOD; import org.kie.pmml.models.mining.model.segmentation.KiePMMLSegment; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.kie.pmml.commons.enums.ResultCode.FAIL; import static org.kie.pmml.commons.enums.ResultCode.OK; import static org.kie.pmml.evaluator.core.utils.Converter.getUnwrappedParametersMap; /** * Default <code>PMMLModelExecutor</code> for <b>Mining</b> */ public class PMMLMiningModelEvaluator implements PMMLModelEvaluator<KiePMMLMiningModel> { private static final Logger logger = LoggerFactory.getLogger(PMMLMiningModelEvaluator.class.getName()); private static final String EXPECTED_A_KIE_PMMLMINING_MODEL_RECEIVED = "Expected a KiePMMLMiningModel, received %s"; private static final String TARGET_FIELD_REQUIRED_RETRIEVED = "TargetField required, retrieved %s"; private static final Map<String, InternalKnowledgeBase> MAPPED_KIEBASES = new HashMap<>(); @Override public PMML_MODEL getPMMLModelType() { return PMML_MODEL.MINING_MODEL; } @Override public PMML4Result evaluate(final KieBase knowledgeBase, final KiePMMLMiningModel model, final PMMLContext pmmlContext) { validate(model); return evaluateMiningModel((KiePMMLMiningModel) model, pmmlContext, knowledgeBase); } PMML4Result getPMML4Result(final KiePMMLMiningModel toEvaluate, final LinkedHashMap<String, KiePMMLNameValue> inputData) { final MULTIPLE_MODEL_METHOD multipleModelMethod = toEvaluate.getSegmentation().getMultipleModelMethod(); Object prediction = null; ResultCode resultCode = OK; try { prediction = multipleModelMethod.apply(inputData); } catch (KieEnumException e) { logger.warn(e.getMessage()); resultCode = FAIL; } PMML4Result toReturn = new PMML4Result(); toReturn.addResultVariable(toEvaluate.getTargetField(), prediction); toReturn.setResultObjectName(toEvaluate.getTargetField()); toReturn.setResultCode(resultCode.getName()); 
toEvaluate.getOutputFieldsMap().forEach(toReturn::addResultVariable); return toReturn; } /** * Retrieve the <code>PMMLRuntime</code> to be used for the given <b>segment</b> * It creates new <code>InternalKnowledgeBase</code>s and store them in a <code>Map</code>, * to reuse them. * @param kModulePackageName * @param knowledgeBase * @param containerModelName * @return */ PMMLRuntime getPMMLRuntime(final String kModulePackageName, final KieBase knowledgeBase, final String containerModelName) { final String key = containerModelName + "_" + kModulePackageName; InternalKnowledgeBase kieBase = MAPPED_KIEBASES.computeIfAbsent(key, s -> { final KiePackage kiePackage = knowledgeBase.getKiePackage(kModulePackageName); final List<KiePackage> packages = kiePackage != null ? Collections.singletonList(knowledgeBase.getKiePackage(kModulePackageName)) : Collections.emptyList(); RuleBaseConfiguration conf = new RuleBaseConfiguration(); conf.setClassLoader(((KnowledgeBaseImpl) knowledgeBase).getRootClassLoader()); InternalKnowledgeBase toReturn = KnowledgeBaseFactory.newKnowledgeBase(kModulePackageName, conf); toReturn.addPackages(packages); return toReturn; }); KieRuntimeFactory kieRuntimeFactory = KieRuntimeFactory.of(kieBase); return kieRuntimeFactory.get(PMMLRuntime.class); } /** * Returns a <code>KiePMMLNameValue</code> representation of the <code>PMML4Result</code>. 
* <b>It is based on the assumption there is only one result to be considered, defined as</b> * {@link PMML4Result#getResultObjectName() } * @param result * @param multipleModelMethod * @param weight * @return * @throws KiePMMLException */ KiePMMLNameValue getKiePMMLNameValue(PMML4Result result, MULTIPLE_MODEL_METHOD multipleModelMethod, double weight) { String fieldName = result.getResultObjectName(); Object retrieved = getEventuallyWeightedResult(result.getResultVariables().get(fieldName), multipleModelMethod, weight); return new KiePMMLNameValue(fieldName, retrieved); } /** * Returns a <code>KiePMMLValueWeight</code> if the given <code>MULTIPLE_MODEL_METHOD</code> expect it; * the original <b>rawObject</b>, otherwise * @param rawObject * @param multipleModelMethod * @param weight * @return * @throws KiePMMLException */ Object getEventuallyWeightedResult(Object rawObject, MULTIPLE_MODEL_METHOD multipleModelMethod, double weight) { switch (multipleModelMethod) { case MAJORITY_VOTE: case SELECT_ALL: case SELECT_FIRST: return rawObject; case MAX: case SUM: case MEDIAN: case AVERAGE: case WEIGHTED_SUM: case WEIGHTED_MEDIAN: case WEIGHTED_AVERAGE: if (!(rawObject instanceof Number)) { throw new KiePMMLException("Expected a number, retrieved " + rawObject.getClass().getName()); } return new KiePMMLValueWeight(((Number) rawObject).doubleValue(), weight); case MODEL_CHAIN: case WEIGHTED_MAJORITY_VOTE: throw new KiePMMLException(multipleModelMethod + " not implemented, yet"); default: throw new KiePMMLException("Unrecognized MULTIPLE_MODEL_METHOD " + multipleModelMethod); } } void validate(final KiePMMLModel toValidate) { if (!(toValidate instanceof KiePMMLMiningModel)) { throw new KiePMMLModelException(String.format(EXPECTED_A_KIE_PMMLMINING_MODEL_RECEIVED, toValidate.getClass().getName())); } validateMining((KiePMMLMiningModel) toValidate); } void validateMining(final KiePMMLMiningModel toValidate) { if (toValidate.getTargetField() == null || 
StringUtils.isEmpty(toValidate.getTargetField().trim())) { throw new KiePMMLInternalException(String.format(TARGET_FIELD_REQUIRED_RETRIEVED, toValidate.getTargetField())); } } /** * Evaluate the whole <code>KiePMMLMiningModel</code> * Being it a <b>meta</b> model, it actually works as the top-level PMML model, * recursively and indirectly invoking model-specific evaluators (through <code>PMMLRuntime</code> container) * * @param toEvaluate * @param pmmlContext * @param knowledgeBase * @return */ private PMML4Result evaluateMiningModel(final KiePMMLMiningModel toEvaluate, final PMMLContext pmmlContext, final KieBase knowledgeBase) { final MULTIPLE_MODEL_METHOD multipleModelMethod = toEvaluate.getSegmentation().getMultipleModelMethod(); final List<KiePMMLSegment> segments = toEvaluate.getSegmentation().getSegments(); final LinkedHashMap<String, KiePMMLNameValue> inputData = new LinkedHashMap<>(); for (KiePMMLSegment segment : segments) { Optional<PMML4Result> segmentResult = evaluateSegment(segment, pmmlContext, knowledgeBase, toEvaluate.getName()); segmentResult.ifPresent(pmml4Result -> { KiePMMLNameValue kiePMMLNameValue = getKiePMMLNameValue(pmml4Result, multipleModelMethod, segment.getWeight()); inputData.put(segment.getId(), kiePMMLNameValue); }); } return getPMML4Result(toEvaluate, inputData); } /** * Evaluate the model contained in the <code>KiePMMLSegment</code>, indirectly invoking * the model-specific evaluator (through <code>PMMLRuntime</code> container) * * @param toEvaluate * @param pmmlContext * @param knowledgeBase * @param containerModelName * @return */ private Optional<PMML4Result> evaluateSegment(final KiePMMLSegment toEvaluate, final PMMLContext pmmlContext, final KieBase knowledgeBase, final String containerModelName) { logger.trace("evaluateSegment {}", toEvaluate.getId()); final KiePMMLPredicate kiePMMLPredicate = toEvaluate.getKiePMMLPredicate(); Optional<PMML4Result> toReturn = Optional.empty(); Map<String, Object> values = 
getUnwrappedParametersMap(pmmlContext.getRequestData().getMappedRequestParams()); String modelName = toEvaluate.getModel().getName(); if (kiePMMLPredicate != null && kiePMMLPredicate.evaluate(values)) { final PMMLRuntime pmmlRuntime = getPMMLRuntime(toEvaluate.getModel().getKModulePackageName(), knowledgeBase, containerModelName); logger.trace("{}: matching predicate, evaluating... ", toEvaluate.getId()); toReturn = Optional.of(pmmlRuntime.evaluate(modelName, pmmlContext)); } return toReturn; } }
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.packages; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe; import com.google.devtools.build.lib.skylarkbuildapi.FilesetEntryApi; import com.google.devtools.build.lib.syntax.Printer; import com.google.devtools.build.lib.syntax.StarlarkValue; import com.google.devtools.build.lib.vfs.PathFragment; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Set; import javax.annotation.Nullable; /** * FilesetEntry is a value object used to represent a "FilesetEntry" inside a "Fileset" BUILD rule. 
*/ @Immutable @ThreadSafe public final class FilesetEntry implements StarlarkValue, FilesetEntryApi { public static final SymlinkBehavior DEFAULT_SYMLINK_BEHAVIOR = SymlinkBehavior.COPY; public static final String DEFAULT_STRIP_PREFIX = "."; public static final String STRIP_PREFIX_WORKSPACE = "%workspace%"; @Override public boolean isImmutable() { // TODO(laszlocsomor): set this to true. I think we could do this right now, but am not sure. // Maybe we have to verify that Skylark recognizes every member's type to be recursively // immutable; as of 15/01/2016 this is not true for enum types in general, to name an example. return false; } public static List<String> makeStringList(List<Label> labels) { if (labels == null) { return Collections.emptyList(); } List<String> strings = Lists.newArrayListWithCapacity(labels.size()); for (Label label : labels) { strings.add(label.toString()); } return strings; } public static List<?> makeList(Collection<?> list) { return list == null ? Lists.newArrayList() : Lists.newArrayList(list); } @Override public void repr(Printer printer) { printer.append("FilesetEntry(srcdir = "); printer.repr(getSrcLabel().toString()); printer.append(", files = "); printer.repr(makeStringList(getFiles())); printer.append(", excludes = "); printer.repr(makeList(getExcludes())); printer.append(", destdir = "); printer.repr(getDestDir().getPathString()); printer.append(", strip_prefix = "); printer.repr(getStripPrefix()); printer.append(", symlinks = "); printer.repr(getSymlinkBehavior().toString()); printer.append(")"); } /** SymlinkBehavior decides what to do when a source file of a FilesetEntry is a symlink. */ @Immutable @ThreadSafe public enum SymlinkBehavior { /** Just copies the symlink as-is. May result in dangling links. */ COPY, /** Follow the link and make the destination point to the absolute path of the final target. 
*/ DEREFERENCE; public static SymlinkBehavior parse(String value) throws IllegalArgumentException { return valueOf(value.toUpperCase(Locale.ENGLISH)); } @Override public String toString() { return super.toString().toLowerCase(); } } private final Label srcLabel; @Nullable private final ImmutableList<Label> files; @Nullable private final ImmutableSet<String> excludes; private final PathFragment destDir; private final SymlinkBehavior symlinkBehavior; private final String stripPrefix; /** * Constructs a FilesetEntry with the given values. * * @param srcLabel the label of the source directory. Must be non-null. * @param files The explicit files to include. May be null. * @param excludes The files to exclude. Man be null. May only be non-null if files is null. * @param destDir The target-relative output directory. * @param symlinkBehavior how to treat symlinks on the input. See * {@link FilesetEntry.SymlinkBehavior}. * @param stripPrefix the prefix to strip from the package-relative path. If ".", keep only the * basename. */ public FilesetEntry( Label srcLabel, @Nullable List<Label> files, @Nullable Collection<String> excludes, @Nullable String destDir, @Nullable SymlinkBehavior symlinkBehavior, @Nullable String stripPrefix) { this.srcLabel = Preconditions.checkNotNull(srcLabel); this.files = files == null ? null : ImmutableList.copyOf(files); this.excludes = (excludes == null || excludes.isEmpty()) ? null : ImmutableSet.copyOf(excludes); this.destDir = PathFragment.create((destDir == null) ? "" : destDir); this.symlinkBehavior = symlinkBehavior == null ? DEFAULT_SYMLINK_BEHAVIOR : symlinkBehavior; this.stripPrefix = stripPrefix == null ? DEFAULT_STRIP_PREFIX : stripPrefix; } /** * @return the source label. */ public Label getSrcLabel() { return srcLabel; } /** * @return the destDir. Non null. */ public PathFragment getDestDir() { return destDir; } /** * @return how symlinks should be handled. 
*/
  public SymlinkBehavior getSymlinkBehavior() {
    return symlinkBehavior;
  }

  /**
   * @return an immutable set of excludes. Null if none specified.
   */
  @Nullable
  public ImmutableSet<String> getExcludes() {
    return excludes;
  }

  /**
   * @return an immutable list of file labels. Null if none specified.
   */
  @Nullable
  public ImmutableList<Label> getFiles() {
    return files;
  }

  /**
   * @return true if this Fileset should get files from the source directory.
   */
  public boolean isSourceFileset() {
    // NOTE(review): a src label whose name is "BUILD" is taken to denote the
    // package's source directory itself — confirm against the Fileset rule docs.
    return "BUILD".equals(srcLabel.getName());
  }

  /**
   * @return all prerequisite labels in the FilesetEntry.
   */
  public Collection<Label> getLabels() {
    // Either the explicit file list, or (when none is given) the src label alone.
    Set<Label> labels = new LinkedHashSet<>();
    if (files != null) {
      labels.addAll(files);
    } else {
      labels.add(srcLabel);
    }
    return labels;
  }

  /**
   * @return the prefix that should be stripped from package-relative path names.
   */
  public String getStripPrefix() {
    return stripPrefix;
  }

  /**
   * Checks the attribute combination of this entry for consistency
   * (files vs. excludes, absolute paths, strip-prefix shape).
   *
   * @return null if the entry is valid, and a human-readable error message otherwise.
   */
  @Nullable
  public String validate() {
    if (excludes != null && files != null) {
      return "Cannot specify both 'files' and 'excludes' in a FilesetEntry";
    } else if (files != null && !isSourceFileset()) {
      return "Cannot specify files with Fileset label '" + srcLabel + "'";
    } else if (destDir.isAbsolute()) {
      return "Cannot specify absolute destdir '" + destDir + "'";
    } else if (!stripPrefix.equals(DEFAULT_STRIP_PREFIX) && files == null) {
      return "If the strip prefix is not \"" + DEFAULT_STRIP_PREFIX + "\", files must be specified";
    } else if (stripPrefix.startsWith("/")) {
      return "Cannot specify absolute strip prefix; perhaps you need to use \""
          + STRIP_PREFIX_WORKSPACE + "\"";
    } else if (PathFragment.create(stripPrefix).containsUplevelReferences()) {
      return "Strip prefix must not contain uplevel references";
    } else if (stripPrefix.startsWith("%") && !stripPrefix.startsWith(STRIP_PREFIX_WORKSPACE)) {
      return "If the strip_prefix starts with \"%\" then it must start with \""
          + STRIP_PREFIX_WORKSPACE + "\"";
    } else {
      return null;
    }
  }

  @Override
  public String toString() {
    return String.format(
        "FilesetEntry(srcdir=%s, destdir=%s, strip_prefix=%s, symlinks=%s, "
            + "%d file(s) and %d excluded)",
        srcLabel, destDir, stripPrefix, symlinkBehavior,
        files != null ? files.size() : 0,
        excludes != null ? excludes.size() : 0);
  }

  @Override
  public int hashCode() {
    // Must stay consistent with equals(): same six fields.
    return Objects.hashCode(srcLabel, files, excludes, destDir, symlinkBehavior, stripPrefix);
  }

  @Override
  public boolean equals(Object other) {
    if (this == other) {
      return true;
    }
    if (!(other instanceof FilesetEntry)) {
      return false;
    }
    FilesetEntry that = (FilesetEntry) other;
    return Objects.equal(srcLabel, that.srcLabel)
        && Objects.equal(files, that.files)
        && Objects.equal(excludes, that.excludes)
        && Objects.equal(destDir, that.destDir)
        && Objects.equal(symlinkBehavior, that.symlinkBehavior)
        && Objects.equal(stripPrefix, that.stripPrefix);
  }
}
/* * Copyright 2000-2014 Vaadin Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.vaadin.client.renderers; import com.google.gwt.dom.client.BrowserEvents; import com.google.gwt.dom.client.Element; import com.google.gwt.dom.client.EventTarget; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.event.dom.client.DomEvent; import com.google.gwt.event.dom.client.MouseEvent; import com.google.gwt.event.shared.EventHandler; import com.google.gwt.event.shared.HandlerManager; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.Widget; import com.google.web.bindery.event.shared.HandlerRegistration; import com.vaadin.client.WidgetUtil; import com.vaadin.client.widget.escalator.Cell; import com.vaadin.client.widget.escalator.RowContainer; import com.vaadin.client.widget.grid.CellReference; import com.vaadin.client.widget.grid.EventCellReference; import com.vaadin.client.widgets.Escalator; import com.vaadin.client.widgets.Grid; import com.vaadin.shared.ui.grid.GridConstants.Section; /** * An abstract superclass for renderers that render clickable widgets. Click * handlers can be added to a renderer to listen to click events emitted by all * widgets rendered by the renderer. 
* * @param <T> * the presentation (column) type * @param <W> * the widget type * * @since 7.4 * @author Vaadin Ltd */ public abstract class ClickableRenderer<T, W extends Widget> extends WidgetRenderer<T, W> implements ClickHandler { /** * A handler for {@link RendererClickEvent renderer click events}. * * @param <R> * the row type of the containing Grid * * @see {@link ButtonRenderer#addClickHandler(RendererClickHandler)} */ public interface RendererClickHandler<R> extends EventHandler { /** * Called when a rendered button is clicked. * * @param event * the event representing the click */ void onClick(RendererClickEvent<R> event); } /** * An event fired when a widget rendered by a ClickableWidgetRenderer * subclass is clicked. * * @param <R> * the row type of the containing Grid */ @SuppressWarnings("rawtypes") public static class RendererClickEvent<R> extends MouseEvent<RendererClickHandler> { @SuppressWarnings("unchecked") static final Type<RendererClickHandler> TYPE = new Type<RendererClickHandler>( BrowserEvents.CLICK, new RendererClickEvent()); private CellReference<R> cell; private R row; private RendererClickEvent() { } /** * Returns the cell of the clicked button. * * @return the cell */ public CellReference<R> getCell() { return cell; } /** * Returns the data object corresponding to the row of the clicked * button. * * @return the row data object */ public R getRow() { return row; } @Override public Type<RendererClickHandler> getAssociatedType() { return TYPE; } @Override @SuppressWarnings("unchecked") protected void dispatch(RendererClickHandler handler) { EventTarget target = getNativeEvent().getEventTarget(); if (!Element.is(target)) { return; } Element e = Element.as(target); Grid<R> grid = (Grid<R>) findClosestParentGrid(e); cell = findCell(grid, e); row = cell.getRow(); handler.onClick(this); } /** * Returns the cell the given element belongs to. 
* * @param grid * the grid instance that is queried * @param e * a cell element or the descendant of one * @return the cell or null if the element is not a grid cell or a * descendant of one */ private static <T> CellReference<T> findCell(Grid<T> grid, Element e) { RowContainer container = getEscalator(grid).findRowContainer(e); if (container == null) { return null; } Cell cell = container.getCell(e); EventCellReference<T> cellReference = new EventCellReference<T>( grid); // FIXME: Section is currently always body. Might be useful for the // future to have an actual check. cellReference.set(cell, Section.BODY); return cellReference; } private native static Escalator getEscalator(Grid<?> grid) /*-{ return grid.@com.vaadin.client.widgets.Grid::escalator; }-*/; /** * Returns the Grid instance containing the given element, if any. * <p> * <strong>Note:</strong> This method may not work reliably if the grid * in question is wrapped in a {@link Composite} <em>unless</em> the * element is inside another widget that is a child of the wrapped grid; * please refer to the note in * {@link WidgetUtil#findWidget(Element, Class) Util.findWidget} for * details. * * @param e * the element whose parent grid to find * @return the parent grid or null if none found. */ private static Grid<?> findClosestParentGrid(Element e) { Widget w = WidgetUtil.findWidget(e, null); while (w != null && !(w instanceof Grid)) { w = w.getParent(); } return (Grid<?>) w; } } private HandlerManager handlerManager; /** * {@inheritDoc} * <p> * <em>Implementation note:</em> It is the implementing method's * responsibility to add {@code this} as a click handler of the returned * widget, or a widget nested therein, in order to make click events * propagate properly to handlers registered via * {@link #addClickHandler(RendererClickHandler) addClickHandler}. */ @Override public abstract W createWidget(); /** * Adds a click handler to this button renderer. 
The handler is invoked * every time one of the widgets rendered by this renderer is clicked. * <p> * Note that the row type of the click handler must match the row type of * the containing Grid. * * @param handler * the click handler to be added */ public HandlerRegistration addClickHandler(RendererClickHandler<?> handler) { if (handlerManager == null) { handlerManager = new HandlerManager(this); } return handlerManager.addHandler(RendererClickEvent.TYPE, handler); } @Override public void onClick(ClickEvent event) { /* * The handler manager is lazily instantiated so it's null iff * addClickHandler is never called. */ if (handlerManager != null) { DomEvent.fireNativeEvent(event.getNativeEvent(), handlerManager); } } }
/*
 * RDV
 * Real-time Data Viewer
 * http://rdv.googlecode.com/
 *
 * Copyright (c) 2005-2007 University at Buffalo
 * Copyright (c) 2005-2007 NEES Cyberinfrastructure Center
 * Copyright (c) 2008 Palta Software
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * $URL$
 * $Revision$
 * $Date$
 * $Author$
 */

package org.rdv.data;

import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

/**
 * A channel and its metadata.
 *
 * @author  Jason P. Hanley
 */
public class Channel {
  /** The name of the channel (immutable, never null) */
  private final String name;

  /** The unit of the channel, or null if none was set */
  private String unit;

  /** the start time for the data */
  private double start;

  /** the duration of the data */
  private double duration;

  /** the metadata map for this channel; values are never null (empty string instead) */
  private final Map<String, String> metadata;

  /**
   * Creates a channel with no unit.
   *
   * @param name  the name of the channel
   * @throws IllegalArgumentException  if name is null
   */
  public Channel(String name) {
    this(name, null);
  }

  /**
   * Creates a channel with a unit.
   *
   * @param name  the name of the channel
   * @param unit  the unit for the channel, may be null
   * @throws IllegalArgumentException  if name is null
   */
  public Channel(String name, String unit) {
    if (name == null) {
      throw new IllegalArgumentException("Null channel name argument.");
    }

    this.name = name;
    this.unit = unit;

    metadata = new HashMap<String, String>();
  }

  /**
   * Gets the name of the channel.
   *
   * @return  the name of the channel
   */
  public String getName() {
    return name;
  }

  /**
   * Gets the unit for the channel.
   *
   * @return  the unit for the channel, or null if no unit was set
   */
  public String getUnit() {
    return unit;
  }

  /**
   * Sets the unit for the channel.
   *
   * @param unit  the unit for the channel
   */
  protected void setUnit(String unit) {
    this.unit = unit;
  }

  /**
   * Gets the start time for the data.
   *
   * @return  the start time
   */
  public double getStart() {
    return start;
  }

  /**
   * Sets the start time for the data.
   *
   * @param start  the new start time
   */
  protected void setStart(double start) {
    this.start = start;
  }

  /**
   * Gets the duration of the data.
   *
   * @return  the duration
   */
  public double getDuration() {
    return duration;
  }

  /**
   * Sets the duration of the data.
   *
   * @param duration  the new duration
   */
  protected void setDuration(double duration) {
    this.duration = duration;
  }

  /**
   * Gets the metadata string associated with the given key.
   *
   * @param key  the key corresponding to the desired metadata string
   * @return     the metadata string or null if the key was not found
   * @since      1.3
   */
  public String getMetadata(String key) {
    // Map is already typed <String,String>; no cast needed.
    return metadata.get(key);
  }

  /**
   * Gets an unmodifiable view of the metadata map for this channel.
   *
   * @return  the metadata map
   */
  public Map<String,String> getMetadata() {
    return Collections.unmodifiableMap(metadata);
  }

  /**
   * Sets the metadata for <code>key</code> with an empty value.
   *
   * @param key  the key to the metadata
   */
  protected void setMetadata(String key) {
    setMetadata(key, null);
  }

  /**
   * Sets the metadata <code>value</code> for the <code>key</code>. A null
   * value is stored as the empty string.
   *
   * @param key    the key to the metadata
   * @param value  the value of the metadata
   * @throws IllegalArgumentException  if key is null
   */
  protected void setMetadata(String key, String value) {
    if (key == null) {
      throw new IllegalArgumentException("Metadata key can't be null.");
    }

    if (value == null) {
      value = "";
    }

    metadata.put(key, value);
  }

  /**
   * Return a string with the channel name and all metadata, formatted as
   * <code>name (unit): key1=value1, key2</code>.
   *
   * @return  a string representation of the channel and its metadata
   */
  @Override
  public String toString() {
    StringBuilder string = new StringBuilder(getName());

    if (getUnit() != null) {
      string.append(" (").append(getUnit()).append(")");
    }

    if (!metadata.isEmpty()) {
      string.append(": ");

      // Iterate entries directly instead of keySet()+get(); the trailing
      // hasNext() check keeps the ", " separator between items only.
      Iterator<Map.Entry<String, String>> it = metadata.entrySet().iterator();
      while (it.hasNext()) {
        Map.Entry<String, String> entry = it.next();
        string.append(entry.getKey());

        if (entry.getValue().length() > 0) {
          string.append('=').append(entry.getValue());
        }

        if (it.hasNext()) {
          string.append(", ");
        }
      }
    }

    return string.toString();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.runtime.instructions;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sysml.api.monitoring.Location;
import org.apache.sysml.lops.Lop;
import org.apache.sysml.parser.DataIdentifier;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.context.ExecutionContext;

/**
 * Base class for all runtime instructions. Carries the serialized instruction
 * string, its opcode, a unique id, and the originating DML script position,
 * and defines the preprocess/process/postprocess execution life cycle.
 */
public abstract class Instruction
{
    /** Backend category of an instruction (control program, MR, Spark, GPU, ...). */
    public enum INSTRUCTION_TYPE { CONTROL_PROGRAM, MAPREDUCE, EXTERNAL_LIBRARY, MAPREDUCE_JOB, BREAKPOINT, SPARK, GPU };

    protected static final Log LOG = LogFactory.getLog(Instruction.class.getName());

    // Delimiters and prefixes shared with the Lop layer so that instruction
    // strings are parsed consistently across compiler and runtime.
    public static final String OPERAND_DELIM = Lop.OPERAND_DELIMITOR;
    public static final String DATATYPE_PREFIX = Lop.DATATYPE_PREFIX;
    public static final String VALUETYPE_PREFIX = Lop.VALUETYPE_PREFIX;
    public static final String LITERAL_PREFIX = Lop.LITERAL_PREFIX;
    public static final String INSTRUCTION_DELIM = Lop.INSTRUCTION_DELIMITOR;
    public static final String NAME_VALUE_SEPARATOR = Lop.NAME_VALUE_SEPARATOR;
    public static final String SP_INST_PREFIX = "sp_";
    public static final String GPU_INST_PREFIX = "gpu_";

    //basic instruction meta data
    protected INSTRUCTION_TYPE type = null;
    protected String instString = null;   // full serialized instruction string
    protected String instOpcode = null;   // opcode portion only
    private long instID = -1;             // unique id; -1 until assigned

    //originating script positions (-1 = unknown)
    protected int beginLine = -1;
    protected int endLine = -1;
    protected int beginCol = -1;
    protected int endCol = -1;

    public void setType (INSTRUCTION_TYPE tp ) {
        type = tp;
    }

    public INSTRUCTION_TYPE getType() {
        return type;
    }

    /**
     * Setter for the originating DML script position of this instruction.
     *
     * @param beginLine exact (or approximate) DML script begin line number
     * @param endLine   DML script end line number
     * @param beginCol  DML script begin column
     * @param endCol    DML script end column
     */
    public void setLocation ( int beginLine, int endLine, int beginCol, int endCol) {
        this.beginLine = beginLine;
        this.endLine = endLine;
        this.beginCol = beginCol;
        this.endCol = endCol;
    }

    /** Copies the script position from the originating lop, if available. */
    public void setLocation(Lop lop) {
        if(lop != null) {
            this.beginLine = lop._beginLine;
            this.endLine = lop._endLine;
            this.beginCol = lop._beginColumn;
            this.endCol = lop._endColumn;
        }
    }

    /** Copies the script position from a data identifier, if available. */
    public void setLocation(DataIdentifier id) {
        if(id != null) {
            this.beginLine = id.getBeginLine();
            this.endLine = id.getEndLine();
            this.beginCol = id.getBeginColumn();
            this.endCol = id.getEndColumn();
        }
    }

    /** Copies the script position from another instruction, if available. */
    public void setLocation(Instruction oldInst) {
        if(oldInst != null) {
            this.beginLine = oldInst.beginLine;
            this.endLine = oldInst.endLine;
            this.beginCol = oldInst.beginCol;
            this.endCol = oldInst.endCol;
        }
    }

    /**
     * Returns the full script position as a single object, or null if any
     * coordinate is unknown.
     */
    public Location getLocation() {
        // Single accessor rather than exposing 4 different getter methods.
        // Also, Location does not contain any references to Spark libraries.
        if(beginLine == -1 || endLine == -1 || beginCol == -1 || endCol == -1) {
            return null;
        }
        else
            return new Location(beginLine, endLine, beginCol, endCol);
    }

    /**
     * Getter for instruction line number
     * @return lineNum Instruction approximate DML script line number
     */
    public int getLineNum() {
        return beginLine;
    }

    /**
     * Setter for instruction unique identifier
     * @param id Instruction unique identifier
     */
    public void setInstID ( long id ) {
        instID = id;
    }

    /**
     * Getter for instruction unique identifier
     * @return instID Instruction unique identifier
     */
    public long getInstID() {
        return instID;
    }

    /** Logs the serialized instruction string at debug level. */
    public void printMe() {
        LOG.debug(instString);
    }

    public String toString() {
        return instString;
    }

    public String getGraphString() {
        return null;
    }

    public String getOpcode() {
        return instOpcode;
    }

    /** Returns the opcode prefixed by the backend marker (sp_/gpu_) if applicable. */
    public String getExtendedOpcode() {
        if( type == INSTRUCTION_TYPE.SPARK )
            return SP_INST_PREFIX + getOpcode();
        else if( type == INSTRUCTION_TYPE.GPU )
            return GPU_INST_PREFIX + getOpcode();
        else
            return getOpcode();
    }

    /**
     * @return true if the instruction string still contains a variable-name
     *         placeholder that must be replaced before execution
     */
    public boolean requiresLabelUpdate() {
        return instString.contains( Lop.VARIABLE_NAME_PLACEHOLDER );
    }

    /**
     * All instructions that have thread-specific filenames or names encoded in it
     * should overwrite this method in order to update (1) the in-memory instruction
     * and (2) the instruction string
     *
     * @param pattern the substring pattern to replace
     * @param replace the replacement text
     * @throws DMLRuntimeException if the update fails
     */
    public void updateInstructionThreadID(String pattern, String replace)
        throws DMLRuntimeException
    {
        //do nothing
    }

    /**
     * This method should be used for any setup before executing this instruction.
     * Overwriting methods should first call the super method and subsequently do
     * their custom setup.
     *
     * @param ec the execution context
     * @return the (potentially replaced) instruction to execute
     * @throws DMLRuntimeException if preprocessing fails
     */
    public Instruction preprocessInstruction(ExecutionContext ec)
        throws DMLRuntimeException
    {
        //update debug status
        ec.updateDebugState( this );

        //return instruction itself
        return this;
    }

    /**
     * This method should be used to execute the instruction.
     *
     * @param ec the execution context
     * @throws DMLRuntimeException if execution fails
     */
    public abstract void processInstruction(ExecutionContext ec)
        throws DMLRuntimeException;

    /**
     * This method should be used for any tear down after executing this instruction.
     * Overwriting methods should first do their custom tear down and subsequently
     * call the super method.
     *
     * @param ec the execution context
     */
    public void postprocessInstruction(ExecutionContext ec)
        throws DMLRuntimeException
    {
        //do nothing
    }
}
package pt.tumba.spell;

import java.util.AbstractCollection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;

/**
 * A <code>MultiValueMap</code> allows storing multiple values for each key.
 * Putting a value into the map will add the value to a Collection at that key.
 * Getting a value will return a Collection, holding all the values put to that
 * key.
 * <p>
 * This implementation uses an <code>ArrayList</code> as the collection.
 * When there are no values mapped to a key, <code>null</code> is returned.
 * <p>
 * This class does not implement the {@link java.util.Map} interface because
 * of the slightly different semantics: <code>put</code> adds a single value,
 * but <code>get</code> returns a Collection of values instead of a single one.
 * <p>
 * For example:
 * <pre>
 * MultiValueMap mm = new MultiValueMap();
 * mm.put(key, "A");
 * mm.put(key, "B");
 * mm.put(key, "C");
 * Collection col = mm.get(key);
 * </pre>
 * <p>
 * <code>col</code> will be a collection containing "A", "B", "C".
 * <p>
 * This class has been adapted from the <code>MultiHashMap</code> in the
 * <a href="http://jakarta.apache.org/commons/collections/">Jakarta Commons
 * Collections</a>.
 *
 * @author Bruno Martins
 */
public class MultiValueMap {

    /**
     * Inner class exposing a live, flattened view over all values of all keys.
     */
    private class Values extends AbstractCollection {

        /**
         * {@inheritDoc}
         */
        public Iterator iterator() {
            return new ValueIterator();
        }

        /**
         * {@inheritDoc}
         */
        public int size() {
            // Fix: sum the per-key collection sizes via totalSize() — O(#keys)
            // — instead of walking every single element with an iterator,
            // which was O(total number of values). Result is identical.
            return totalSize();
        }

        /**
         * {@inheritDoc}
         */
        public void clear() {
            MultiValueMap.this.clear();
        }
    }

    /**
     * Inner iterator flattening the per-key collections into one value stream.
     */
    private final class ValueIterator implements Iterator {

        /** Iterator over the per-key collections of the backing map. */
        private Iterator backedIterator;

        /** Iterator over the current per-key collection, or null before first use. */
        private Iterator tempIterator;

        /**
         * Creates a new instance.
         */
        private ValueIterator() {
            backedIterator = store.values().iterator();
        }

        /**
         * Advances to the next non-exhausted per-key iterator, if any.
         *
         * @return <code>true</code> if another value is available
         */
        private boolean searchNextIterator() {
            while (tempIterator == null || !tempIterator.hasNext()) {
                if (!backedIterator.hasNext()) {
                    return false;
                }
                tempIterator = ((Collection) backedIterator.next()).iterator();
            }
            return true;
        }

        /**
         * {@inheritDoc}
         */
        public boolean hasNext() {
            return searchNextIterator();
        }

        /**
         * {@inheritDoc}
         */
        public Object next() {
            if (!searchNextIterator()) {
                throw new NoSuchElementException();
            }
            return tempIterator.next();
        }

        /**
         * {@inheritDoc}
         */
        public void remove() {
            if (tempIterator == null) {
                throw new IllegalStateException();
            }
            tempIterator.remove();
        }
    }

    /** Wrapped map used as storage: key -> Collection of values. */
    private final Map store;

    /** Lazily created flattened values view; see {@link #values()}. */
    private transient Collection values = null;

    /**
     * Creates a new instance, using a {@link HashMap} as storage.
     */
    public MultiValueMap() {
        this(new HashMap());
    }

    /**
     * Creates a new instance.
     *
     * @param wrappedMap wrapped map used as storage, e.g. a {@link HashMap} or
     *            a {@link java.util.TreeMap}
     */
    public MultiValueMap(final Map wrappedMap) {
        store = wrappedMap;
    }

    /**
     * Creates a new instance of the map value Collection container.
     * <p>
     * This method can be overridden to use your own collection type.
     *
     * @param coll the collection to copy, may be <code>null</code>
     * @return the new collection
     */
    protected Collection createCollection(final Collection coll) {
        return (coll == null) ? new ArrayList() : new ArrayList(coll);
    }

    /**
     * Removes all mappings from this map.
     */
    public void clear() {
        store.clear();
    }

    /**
     * Returns <code>true</code> if this map contains a mapping for the
     * specified key.
     *
     * @param key key whose presence in this map is to be tested
     * @return <code>true</code> if this map contains a mapping for the
     *         specified key
     */
    public boolean containsKey(Object key) {
        return store.containsKey(key);
    }

    /**
     * Checks whether the map contains the value specified.
     * <p>
     * This checks all collections against all keys for the value, and thus
     * could be slow.
     *
     * @param value the value to search for
     * @return <code>true</code> if the map contains the value
     */
    public boolean containsValue(Object value) {
        // Iterate the per-key collections directly; Map.values() is never
        // null, so the former entrySet()==null guard was dead code.
        for (final Iterator it = store.values().iterator(); it.hasNext();) {
            if (((Collection) it.next()).contains(value)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks whether the collection at the specified key contains the value.
     *
     * @param key the key to use
     * @param value the value to search for
     * @return <code>true</code> if the map contains the value at the specified
     *         key
     */
    public boolean containsValue(final Object key, final Object value) {
        final Collection coll = get(key);
        return (coll != null) && coll.contains(value);
    }

    /**
     * Compares the specified object with this map for equality. Returns
     * <code>true</code> if the given object is also a
     * <code>MultiValueMap</code> map or a map of collections and the two Maps
     * represent the same mappings.
     *
     * @param o object to be compared for equality with this map
     * @return <code>true</code> if the specified object is equal to this map
     */
    public boolean equals(Object o) {
        if (o instanceof MultiValueMap) {
            // compare wrapped maps
            return store.equals(((MultiValueMap) o).store);
        } else if (o instanceof Map) {
            // compare wrapped map with object
            return store.equals(o);
        } else {
            return false;
        }
    }

    /**
     * Returns the collection of values to which this map maps the specified
     * key.
     *
     * @param key key whose associated value is to be returned
     * @return the collection of values to which this map maps the specified
     *         key, or <code>null</code> if the map contains no mapping for this key
     */
    public Collection get(Object key) {
        return (Collection) store.get(key);
    }

    /**
     * Returns the hash code value for this map; consistent with
     * {@link #equals(Object)} since both delegate to the wrapped map.
     *
     * @return the hash code value for this map.
     */
    public int hashCode() {
        return store.hashCode();
    }

    /**
     * Returns <code>true</code> if this map contains no key-value mappings.
     *
     * @return <code>true</code> if this map contains no key-value mappings
     */
    public boolean isEmpty() {
        return store.isEmpty();
    }

    /**
     * Returns a set view of the keys contained in this map. The set is backed
     * by the map, so changes to the map are reflected in the set, and
     * vice-versa.
     *
     * @return a set view of the keys contained in this map
     */
    public Set keySet() {
        return store.keySet();
    }

    /**
     * Adds the value to the collection associated with the specified key.
     * <p>
     * Unlike a normal <code>Map</code> the previous value is not replaced.
     * Instead the new value is added to the collection stored against the key.
     *
     * @param key the key to store against
     * @param value the value to add to the collection at the key
     * @return the value added if the map changed and <code>null</code> if the
     *         map did not change
     */
    public Object put(Object key, Object value) {
        Collection coll = get(key);
        if (coll == null) {
            coll = createCollection(null);
            store.put(key, coll);
        }
        final boolean result = coll.add(value);
        return (result ? value : null);
    }

    /**
     * Adds a collection of values to the collection associated with the
     * specified key.
     *
     * @param key the key to store against
     * @param valueCol the values to add to the collection at the key,
     *            ignored if <code>null</code>
     * @return <code>true</code> if this map changed
     */
    public boolean putAll(final Object key, final Collection valueCol) {
        if (valueCol == null || valueCol.size() == 0) {
            return false;
        }
        Collection coll = get(key);
        if (coll == null) {
            coll = createCollection(valueCol);
            if (coll.size() == 0) {
                return false;
            }
            store.put(key, coll);
            return true;
        } else {
            return coll.addAll(valueCol);
        }
    }

    /**
     * Removes all mappings for this key from this map if any are present.
     *
     * @param key key whose mappings are to be removed from the map.
     * @return collection of values previously associated with specified key,
     *         or <code>null</code> if there was no mapping for key
     */
    public Collection remove(Object key) {
        return (Collection) store.remove(key);
    }

    /**
     * Removes a specific value from map.
     * <p>
     * The item is removed from the collection mapped to the specified key.
     * Other values attached to that key are unaffected.
     * <p>
     * If the last value for a key is removed, <code>null</code> will be
     * returned from a subsequent <code>get(key)</code>.
     *
     * @param key the key to remove from
     * @param item the value to remove
     * @return the value removed (which was passed in), <code>null</code> if
     *         nothing removed
     */
    public Object remove(final Object key, final Object item) {
        final Collection valuesForKey = get(key);
        if (valuesForKey == null) {
            return null;
        }
        valuesForKey.remove(item);
        // remove the list if it is now empty
        // (saves space, and allows equals to work)
        if (valuesForKey.isEmpty()) {
            remove(key);
        }
        return item;
    }

    /**
     * Returns the number of key-value mappings in this map (i.e. the number of
     * keys, not values).
     *
     * @return the number of key-value mappings in this map
     */
    public int size() {
        return store.size();
    }

    /**
     * Gets the size of the collection mapped to the specified key.
     *
     * @param key the key to get size for
     * @return the size of the collection at the key, zero if key not in map
     */
    public int size(final Object key) {
        final Collection coll = get(key);
        return (coll == null) ? 0 : coll.size();
    }

    /**
     * Gets the total size of the map by counting all the values.
     *
     * @return the total size of the map counting all values
     */
    public int totalSize() {
        int total = 0;
        for (final Iterator it = store.values().iterator(); it.hasNext();) {
            total += ((Collection) it.next()).size();
        }
        return total;
    }

    /**
     * Gets a collection containing all the values in the map.
     * <p>
     * This returns a collection containing the combination of values from all
     * keys.
     *
     * @return a collection view of the values contained in this map
     */
    public Collection values() {
        if (values == null) {
            values = new Values();
        }
        return values;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package opennlp.tools.util;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

/**
 * The {@link Version} class represents the OpenNlp Tools library version.
 * <p>
 * The version has three parts:
 * <ul>
 * <li>Major: OpenNlp Tools libraries with a different major version are not interchangeable.</li>
 * <li>Minor: OpenNlp Tools libraries with an identical major version, but different
 *     minor version may be interchangeable. See release notes for further details.</li>
 * <li>Revision: OpenNlp Tools libraries with same major and minor version, but a different
 *     revision, are fully interchangeable.</li>
 * </ul>
 * Instances are immutable.
 */
public class Version {

  private static final String DEV_VERSION_STRING = "0.0.0-SNAPSHOT";

  public static final Version DEV_VERSION = Version.parse(DEV_VERSION_STRING);

  private static final String SNAPSHOT_MARKER = "-SNAPSHOT";

  private final int major;

  private final int minor;

  private final int revision;

  private final boolean snapshot;

  /**
   * Initializes the current instance with the provided
   * versions.
   *
   * @param major the major version number
   * @param minor the minor version number
   * @param revision the revision number
   * @param snapshot true if this is a snapshot (pre-release) version
   */
  public Version(int major, int minor, int revision, boolean snapshot) {
    this.major = major;
    this.minor = minor;
    this.revision = revision;
    this.snapshot = snapshot;
  }

  /**
   * Initializes the current instance with the provided
   * versions. The version will not be a snapshot version.
   *
   * @param major the major version number
   * @param minor the minor version number
   * @param revision the revision number
   */
  public Version(int major, int minor, int revision) {
    this(major, minor, revision, false);
  }

  /**
   * Retrieves the major version.
   *
   * @return major version
   */
  public int getMajor() {
    return major;
  }

  /**
   * Retrieves the minor version.
   *
   * @return minor version
   */
  public int getMinor() {
    return minor;
  }

  /**
   * Retrieves the revision version.
   *
   * @return revision version
   */
  public int getRevision() {
    return revision;
  }

  /**
   * Indicates whether this is a snapshot (pre-release) version.
   *
   * @return true if this version carries the snapshot marker
   */
  public boolean isSnapshot() {
    return snapshot;
  }

  /**
   * Retrieves the version string.
   *
   * The {@link #parse(String)} method can create an instance
   * of {@link Version} with the returned version value string.
   *
   * @return the version value string
   */
  @Override
  public String toString() {
    return Integer.toString(getMajor()) + "." + Integer.toString(getMinor()) +
        "." + Integer.toString(getRevision()) + (isSnapshot() ? SNAPSHOT_MARKER : "");
  }

  @Override
  public boolean equals(Object o) {
    if (o == this) {
      return true;
    }
    else if (o instanceof Version) {
      Version version = (Version) o;

      return getMajor() == version.getMajor()
          && getMinor() == version.getMinor()
          && getRevision() == version.getRevision()
          && isSnapshot() == version.isSnapshot();
    }
    else {
      return false;
    }
  }

  /**
   * Fix: {@code equals} was overridden without {@code hashCode}, breaking the
   * {@link Object} contract (equal instances could land in different hash
   * buckets). Combines the same four fields that {@link #equals(Object)} uses.
   *
   * @return the hash code for this version
   */
  @Override
  public int hashCode() {
    int result = major;
    result = 31 * result + minor;
    result = 31 * result + revision;
    result = 31 * result + (snapshot ? 1 : 0);
    return result;
  }

  /**
   * Return a new {@link Version} initialized to the value
   * represented by the specified {@link String}
   *
   * @param version the string to be parsed
   *
   * @return the version represented by the string value
   *
   * @throws NumberFormatException if the string does
   *     not contain a valid version
   */
  public static Version parse(String version) {

    int indexFirstDot = version.indexOf('.');

    int indexSecondDot = version.indexOf('.', indexFirstDot + 1);

    if (indexFirstDot == -1 || indexSecondDot == -1) {
      throw new NumberFormatException("Invalid version format '" + version
          + "', expected two dots!");
    }

    // The numeric part ends at the first dash (e.g. "1.5.3-SNAPSHOT"),
    // otherwise at the end of the string.
    int indexFirstDash = version.indexOf('-');

    int versionEnd;
    if (indexFirstDash == -1) {
      versionEnd = version.length();
    }
    else {
      versionEnd = indexFirstDash;
    }

    boolean snapshot = version.endsWith(SNAPSHOT_MARKER);

    return new Version(Integer.parseInt(version.substring(0, indexFirstDot)),
        Integer.parseInt(version.substring(indexFirstDot + 1, indexSecondDot)),
        Integer.parseInt(version.substring(indexSecondDot + 1, versionEnd)), snapshot);
  }

  /**
   * Retrieves the current version of the OpenNlp Tools library.
   *
   * @return the current version
   */
  public static Version currentVersion() {

    Properties manifest = new Properties();

    // Try to read the version from the version file if it is available,
    // otherwise set the version to the development version
    InputStream versionIn = Version.class.getResourceAsStream("opennlp.version");

    if (versionIn != null) {
      try {
        manifest.load(versionIn);
      } catch (IOException e) {
        // ignore error: fall back to the development version below
      }
      finally {
        try {
          versionIn.close();
        } catch (IOException e) {
          // ignore error on close
        }
      }
    }

    String versionString =
        manifest.getProperty("OpenNLP-Version", DEV_VERSION_STRING);

    // An unfiltered resource still contains the Maven placeholder.
    if (versionString.equals("${pom.version}"))
      versionString = DEV_VERSION_STRING;

    return Version.parse(versionString);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.math3.analysis.interpolation;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.exception.DimensionMismatchException;
import org.apache.commons.math3.exception.NoDataException;
import org.apache.commons.math3.exception.NullArgumentException;
import org.apache.commons.math3.linear.ArrayRealVector;
import org.apache.commons.math3.linear.RealVector;
import org.apache.commons.math3.random.UnitSphereRandomVectorGenerator;
import org.apache.commons.math3.util.FastMath;

/**
 * Interpolating function that implements the
 * <a href="http://www.dudziak.com/microsphere.php">Microsphere Projection</a>.
 *
 * @version $Id: MicrosphereInterpolatingFunction.java 1455194 2013-03-11 15:45:54Z luc $
 */
public class MicrosphereInterpolatingFunction
    implements MultivariateFunction {
    /**
     * Space dimension.
     */
    private final int dimension;
    /**
     * Internal accounting data for the interpolation algorithm.
     * Each element of the list corresponds to one surface element of
     * the microsphere.
     */
    private final List<MicrosphereSurfaceElement> microsphere;
    /**
     * Exponent used in the power law that computes the weights of the
     * sample data.
     */
    private final double brightnessExponent;
    /**
     * Sample data.
     */
    private final Map<RealVector, Double> samples;

    /**
     * Class for storing the accounting data needed to perform the
     * microsphere projection.
     */
    private static class MicrosphereSurfaceElement {
        /** Normal vector characterizing a surface element. */
        private final RealVector normal;
        /** Illumination received from the brightest sample. */
        private double brightestIllumination;
        /** Brightest sample. */
        private Map.Entry<RealVector, Double> brightestSample;

        /**
         * @param n Normal vector characterizing a surface element
         * of the microsphere.
         */
        MicrosphereSurfaceElement(double[] n) {
            normal = new ArrayRealVector(n);
        }

        /**
         * Return the normal vector.
         * @return the normal vector
         */
        RealVector normal() {
            return normal;
        }

        /**
         * Reset "illumination" and "sampleIndex".
         */
        void reset() {
            brightestIllumination = 0;
            brightestSample = null;
        }

        /**
         * Store the illumination and index of the brightest sample.
         * @param illuminationFromSample illumination received from sample
         * @param sample current sample illuminating the element
         */
        void store(final double illuminationFromSample,
                   final Map.Entry<RealVector, Double> sample) {
            // Keep only the single most illuminating sample per element.
            if (illuminationFromSample > this.brightestIllumination) {
                this.brightestIllumination = illuminationFromSample;
                this.brightestSample = sample;
            }
        }

        /**
         * Get the illumination of the element.
         * @return the illumination.
         */
        double illumination() {
            return brightestIllumination;
        }

        /**
         * Get the sample illuminating the element the most.
         * @return the sample.
         */
        Map.Entry<RealVector, Double> sample() {
            return brightestSample;
        }
    }

    /**
     * @param xval Arguments for the interpolation points.
     * {@code xval[i][0]} is the first component of interpolation point
     * {@code i}, {@code xval[i][1]} is the second component, and so on
     * until {@code xval[i][d-1]}, the last component of that interpolation
     * point (where {@code dimension} is thus the dimension of the sampled
     * space).
     * @param yval Values for the interpolation points.
     * @param brightnessExponent Brightness dimming factor.
     * @param microsphereElements Number of surface elements of the
     * microsphere.
     * @param rand Unit vector generator for creating the microsphere.
     * @throws DimensionMismatchException if the lengths of {@code yval} and
     * {@code xval} (equal to {@code n}, the number of interpolation points)
     * do not match, or the arrays {@code xval[0]} ... {@code xval[n]}
     * have lengths different from {@code dimension}.
     * @throws NoDataException if an array has zero length.
     * @throws NullArgumentException if an argument is {@code null}.
     */
    public MicrosphereInterpolatingFunction(double[][] xval,
                                            double[] yval,
                                            int brightnessExponent,
                                            int microsphereElements,
                                            UnitSphereRandomVectorGenerator rand)
        throws DimensionMismatchException,
               NoDataException,
               NullArgumentException {
        if (xval == null ||
            yval == null) {
            throw new NullArgumentException();
        }
        if (xval.length == 0) {
            throw new NoDataException();
        }
        if (xval.length != yval.length) {
            throw new DimensionMismatchException(xval.length, yval.length);
        }
        if (xval[0] == null) {
            throw new NullArgumentException();
        }

        dimension = xval[0].length;
        this.brightnessExponent = brightnessExponent;

        // Copy data samples.
        samples = new HashMap<RealVector, Double>(yval.length);
        for (int i = 0; i < xval.length; ++i) {
            final double[] xvalI = xval[i];
            if (xvalI == null) {
                throw new NullArgumentException();
            }
            if (xvalI.length != dimension) {
                throw new DimensionMismatchException(xvalI.length, dimension);
            }

            samples.put(new ArrayRealVector(xvalI), yval[i]);
        }

        microsphere = new ArrayList<MicrosphereSurfaceElement>(microsphereElements);
        // Generate the microsphere, assuming that a fairly large number of
        // randomly generated normals will represent a sphere.
        for (int i = 0; i < microsphereElements; i++) {
            microsphere.add(new MicrosphereSurfaceElement(rand.nextVector()));
        }
    }

    /**
     * @param point Interpolation point.
     * @return the interpolated value.
     * @throws DimensionMismatchException if the point dimension does not
     * match the sample space dimension.
     */
    public double value(double[] point) throws DimensionMismatchException {
        final RealVector p = new ArrayRealVector(point);

        // Reset.
        for (MicrosphereSurfaceElement md : microsphere) {
            md.reset();
        }

        // Compute contribution of each sample point to the microsphere
        // elements illumination.
        for (Map.Entry<RealVector, Double> sd : samples.entrySet()) {
            // Vector between interpolation point and current sample point.
            final RealVector diff = sd.getKey().subtract(p);
            final double diffNorm = diff.getNorm();

            if (FastMath.abs(diffNorm) < FastMath.ulp(1d)) {
                // No need to interpolate, as the interpolation point is
                // actually (very close to) one of the sampled points.
                return sd.getValue();
            }

            // The power-law weight depends only on the distance to the
            // sample, so compute it once per sample rather than once per
            // surface element (it was previously recomputed inside the
            // inner loop).
            final double w = FastMath.pow(diffNorm, -brightnessExponent);
            for (MicrosphereSurfaceElement md : microsphere) {
                md.store(cosAngle(diff, md.normal()) * w, sd);
            }
        }

        // Interpolation calculation.
        // NOTE: if no surface element received any positive illumination,
        // both accumulators stay 0 and the result is 0/0 = NaN.
        double value = 0;
        double totalWeight = 0;
        for (MicrosphereSurfaceElement md : microsphere) {
            final double iV = md.illumination();
            final Map.Entry<RealVector, Double> sd = md.sample();
            if (sd != null) {
                value += iV * sd.getValue();
                totalWeight += iV;
            }
        }

        return value / totalWeight;
    }

    /**
     * Compute the cosine of the angle between 2 vectors.
     *
     * @param v Vector.
     * @param w Vector.
     * @return the cosine of the angle between {@code v} and {@code w}.
     */
    private double cosAngle(final RealVector v, final RealVector w) {
        return v.dotProduct(w) / (v.getNorm() * w.getNorm());
    }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.simpleemail.model;

import java.io.Serializable;

/**
 * <p>
 * Represents the DKIM attributes of a verified email address or a domain.
 * </p>
 */
public class IdentityDkimAttributes implements Serializable, Cloneable {

    /**
     * <p>
     * True if DKIM signing is enabled for email sent from the identity; false
     * otherwise.
     * </p>
     */
    private Boolean dkimEnabled;

    /**
     * <p>
     * Describes whether Amazon SES has successfully verified the DKIM DNS
     * records (tokens) published in the domain name's DNS. (This only applies
     * to domain identities, not email address identities.)
     * </p>
     */
    private String dkimVerificationStatus;

    /**
     * <p>
     * A set of character strings that represent the domain's identity. Using
     * these tokens, you will need to create DNS CNAME records that point to
     * DKIM public keys hosted by Amazon SES. Detection of the updated DNS
     * records may take up to 72 hours; upon success Amazon SES can DKIM-sign
     * email from the domain. (Domain identities only.) See the <a href=
     * "http://docs.aws.amazon.com/ses/latest/DeveloperGuide/easy-dkim-dns-records.html"
     * >Amazon SES Developer Guide</a>.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<String> dkimTokens;

    /**
     * Sets whether DKIM signing is enabled for email sent from the identity.
     *
     * @param dkimEnabled
     *        True if DKIM signing is enabled for email sent from the identity;
     *        false otherwise.
     */
    public void setDkimEnabled(Boolean dkimEnabled) {
        this.dkimEnabled = dkimEnabled;
    }

    /**
     * Returns whether DKIM signing is enabled for email sent from the
     * identity.
     *
     * @return True if DKIM signing is enabled for email sent from the
     *         identity; false otherwise.
     */
    public Boolean getDkimEnabled() {
        return this.dkimEnabled;
    }

    /**
     * Fluent variant of {@link #setDkimEnabled(Boolean)}.
     *
     * @param dkimEnabled
     *        True if DKIM signing is enabled for email sent from the identity;
     *        false otherwise.
     * @return this object, for method chaining.
     */
    public IdentityDkimAttributes withDkimEnabled(Boolean dkimEnabled) {
        setDkimEnabled(dkimEnabled);
        return this;
    }

    /**
     * Alias of {@link #getDkimEnabled()} using boolean-accessor naming.
     *
     * @return True if DKIM signing is enabled for email sent from the
     *         identity; false otherwise.
     */
    public Boolean isDkimEnabled() {
        return this.dkimEnabled;
    }

    /**
     * Sets the DKIM DNS verification status. (Domain identities only.)
     *
     * @param dkimVerificationStatus
     *        Describes whether Amazon SES has successfully verified the DKIM
     *        DNS records (tokens) published in the domain name's DNS.
     * @see VerificationStatus
     */
    public void setDkimVerificationStatus(String dkimVerificationStatus) {
        this.dkimVerificationStatus = dkimVerificationStatus;
    }

    /**
     * Returns the DKIM DNS verification status. (Domain identities only.)
     *
     * @return Describes whether Amazon SES has successfully verified the DKIM
     *         DNS records (tokens) published in the domain name's DNS.
     * @see VerificationStatus
     */
    public String getDkimVerificationStatus() {
        return this.dkimVerificationStatus;
    }

    /**
     * Fluent variant of {@link #setDkimVerificationStatus(String)}.
     *
     * @param dkimVerificationStatus
     *        Describes whether Amazon SES has successfully verified the DKIM
     *        DNS records (tokens) published in the domain name's DNS.
     * @return this object, for method chaining.
     * @see VerificationStatus
     */
    public IdentityDkimAttributes withDkimVerificationStatus(
            String dkimVerificationStatus) {
        setDkimVerificationStatus(dkimVerificationStatus);
        return this;
    }

    /**
     * Enum overload of {@link #setDkimVerificationStatus(String)}; stores the
     * enum's string form.
     *
     * @param dkimVerificationStatus
     *        Describes whether Amazon SES has successfully verified the DKIM
     *        DNS records (tokens) published in the domain name's DNS.
     * @see VerificationStatus
     */
    public void setDkimVerificationStatus(
            VerificationStatus dkimVerificationStatus) {
        this.dkimVerificationStatus = dkimVerificationStatus.toString();
    }

    /**
     * Fluent variant of
     * {@link #setDkimVerificationStatus(VerificationStatus)}.
     *
     * @param dkimVerificationStatus
     *        Describes whether Amazon SES has successfully verified the DKIM
     *        DNS records (tokens) published in the domain name's DNS.
     * @return this object, for method chaining.
     * @see VerificationStatus
     */
    public IdentityDkimAttributes withDkimVerificationStatus(
            VerificationStatus dkimVerificationStatus) {
        setDkimVerificationStatus(dkimVerificationStatus);
        return this;
    }

    /**
     * Returns the DKIM tokens for the domain's identity, lazily creating an
     * empty internal list on first access. (Domain identities only.)
     *
     * @return the DKIM tokens; never {@code null}.
     */
    public java.util.List<String> getDkimTokens() {
        if (dkimTokens == null) {
            dkimTokens = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return dkimTokens;
    }

    /**
     * Replaces the DKIM tokens with a copy of the given collection, or clears
     * them when {@code null} is passed.
     *
     * @param dkimTokens
     *        the DKIM tokens for the domain's identity.
     */
    public void setDkimTokens(java.util.Collection<String> dkimTokens) {
        if (dkimTokens == null) {
            this.dkimTokens = null;
            return;
        }
        this.dkimTokens = new com.amazonaws.internal.SdkInternalList<String>(
                dkimTokens);
    }

    /**
     * Appends the given tokens to the existing list (if any). Use
     * {@link #setDkimTokens(java.util.Collection)} or
     * {@link #withDkimTokens(java.util.Collection)} to replace the existing
     * values instead.
     *
     * @param dkimTokens
     *        the DKIM tokens to append.
     * @return this object, for method chaining.
     */
    public IdentityDkimAttributes withDkimTokens(String... dkimTokens) {
        if (this.dkimTokens == null) {
            setDkimTokens(new com.amazonaws.internal.SdkInternalList<String>(
                    dkimTokens.length));
        }
        for (String token : dkimTokens) {
            this.dkimTokens.add(token);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setDkimTokens(java.util.Collection)} —
     * replaces (does not append to) the existing values.
     *
     * @param dkimTokens
     *        the DKIM tokens for the domain's identity.
     * @return this object, for method chaining.
     */
    public IdentityDkimAttributes withDkimTokens(
            java.util.Collection<String> dkimTokens) {
        setDkimTokens(dkimTokens);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getDkimEnabled() != null) {
            sb.append("DkimEnabled: ").append(getDkimEnabled()).append(",");
        }
        if (getDkimVerificationStatus() != null) {
            sb.append("DkimVerificationStatus: ")
                    .append(getDkimVerificationStatus()).append(",");
        }
        if (getDkimTokens() != null) {
            sb.append("DkimTokens: ").append(getDkimTokens());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof IdentityDkimAttributes)) {
            return false;
        }
        IdentityDkimAttributes that = (IdentityDkimAttributes) obj;
        // Compare via accessors so the lazy token-list initialization in
        // getDkimTokens() behaves exactly as in the generated original.
        return java.util.Objects.equals(getDkimEnabled(), that.getDkimEnabled())
                && java.util.Objects.equals(getDkimVerificationStatus(),
                        that.getDkimVerificationStatus())
                && java.util.Objects.equals(getDkimTokens(), that.getDkimTokens());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seeded at 1,
        // null -> 0) as the hand-rolled loop it replaces, so hash values
        // are unchanged.
        return java.util.Objects.hash(getDkimEnabled(),
                getDkimVerificationStatus(), getDkimTokens());
    }

    @Override
    public IdentityDkimAttributes clone() {
        try {
            return (IdentityDkimAttributes) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.05 at 06:20:26 PM IST
//

package com.mozu.qbintegration.model.qbmodel.salesorderadd;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for ShipAddressType complex type: a QuickBooks shipping
 * address with five free-form address lines, city/state/postal/country
 * fields, and a note. All elements are required by the schema.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="ShipAddressType">
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element name="Addr1" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="Addr2" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="Addr3" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="Addr4" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="Addr5" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="City" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="State" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="PostalCode" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="Country" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *         &lt;element name="Note" type="{http://www.w3.org/2001/XMLSchema}string"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ShipAddressType", propOrder = {
    "addr1",
    "addr2",
    "addr3",
    "addr4",
    "addr5",
    "city",
    "state",
    "postalCode",
    "country",
    "note"
})
public class ShipAddressType {

    // Fields are protected and annotated for JAXB field access; names must
    // stay in sync with the propOrder list above.
    @XmlElement(name = "Addr1", required = true)
    protected String addr1;
    @XmlElement(name = "Addr2", required = true)
    protected String addr2;
    @XmlElement(name = "Addr3", required = true)
    protected String addr3;
    @XmlElement(name = "Addr4", required = true)
    protected String addr4;
    @XmlElement(name = "Addr5", required = true)
    protected String addr5;
    @XmlElement(name = "City", required = true)
    protected String city;
    @XmlElement(name = "State", required = true)
    protected String state;
    @XmlElement(name = "PostalCode", required = true)
    protected String postalCode;
    @XmlElement(name = "Country", required = true)
    protected String country;
    @XmlElement(name = "Note", required = true)
    protected String note;

    /**
     * Gets the value of the addr1 property.
     *
     * @return possible object is {@link String }
     */
    public String getAddr1() {
        return addr1;
    }

    /**
     * Sets the value of the addr1 property.
     *
     * @param value allowed object is {@link String }
     */
    public void setAddr1(String value) {
        this.addr1 = value;
    }

    /**
     * Gets the value of the addr2 property.
     *
     * @return possible object is {@link String }
     */
    public String getAddr2() {
        return addr2;
    }

    /**
     * Sets the value of the addr2 property.
     *
     * @param value allowed object is {@link String }
     */
    public void setAddr2(String value) {
        this.addr2 = value;
    }

    /**
     * Gets the value of the addr3 property.
     *
     * @return possible object is {@link String }
     */
    public String getAddr3() {
        return addr3;
    }

    /**
     * Sets the value of the addr3 property.
     *
     * @param value allowed object is {@link String }
     */
    public void setAddr3(String value) {
        this.addr3 = value;
    }

    /**
     * Gets the value of the addr4 property.
     *
     * @return possible object is {@link String }
     */
    public String getAddr4() {
        return addr4;
    }

    /**
     * Sets the value of the addr4 property.
     *
     * @param value allowed object is {@link String }
     */
    public void setAddr4(String value) {
        this.addr4 = value;
    }

    /**
     * Gets the value of the addr5 property.
     *
     * @return possible object is {@link String }
     */
    public String getAddr5() {
        return addr5;
    }

    /**
     * Sets the value of the addr5 property.
     *
     * @param value allowed object is {@link String }
     */
    public void setAddr5(String value) {
        this.addr5 = value;
    }

    /**
     * Gets the value of the city property.
     *
     * @return possible object is {@link String }
     */
    public String getCity() {
        return city;
    }

    /**
     * Sets the value of the city property.
     *
     * @param value allowed object is {@link String }
     */
    public void setCity(String value) {
        this.city = value;
    }

    /**
     * Gets the value of the state property.
     *
     * @return possible object is {@link String }
     */
    public String getState() {
        return state;
    }

    /**
     * Sets the value of the state property.
     *
     * @param value allowed object is {@link String }
     */
    public void setState(String value) {
        this.state = value;
    }

    /**
     * Gets the value of the postalCode property.
     *
     * @return possible object is {@link String }
     */
    public String getPostalCode() {
        return postalCode;
    }

    /**
     * Sets the value of the postalCode property.
     *
     * @param value allowed object is {@link String }
     */
    public void setPostalCode(String value) {
        this.postalCode = value;
    }

    /**
     * Gets the value of the country property.
     *
     * @return possible object is {@link String }
     */
    public String getCountry() {
        return country;
    }

    /**
     * Sets the value of the country property.
     *
     * @param value allowed object is {@link String }
     */
    public void setCountry(String value) {
        this.country = value;
    }

    /**
     * Gets the value of the note property.
     *
     * @return possible object is {@link String }
     */
    public String getNote() {
        return note;
    }

    /**
     * Sets the value of the note property.
     *
     * @param value allowed object is {@link String }
     */
    public void setNote(String value) {
        this.note = value;
    }
}
/* * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.andes.jms.failover; import org.wso2.andes.jms.BrokerDetails; import org.wso2.andes.jms.ConnectionURL; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class FailoverRoundRobinServers implements FailoverMethod { private static final Logger _logger = LoggerFactory.getLogger(FailoverRoundRobinServers.class); /** The default number of times to cycle through all servers */ public static final int DEFAULT_CYCLE_RETRIES = 1; /** The default number of times to retry each server */ public static final int DEFAULT_SERVER_RETRIES = 0; /** The index into the hostDetails array of the broker to which we are connected */ private int _currentBrokerIndex = 0; /** The number of times to retry connecting for each server */ private int _serverRetries; /** The current number of retry attempts made */ private int _currentServerRetry = 0; /** The number of times to cycle through the servers */ private int _cycleRetries; /** The current number of cycles performed. */ private int _currentCycleRetries = 0; /** Array of BrokerDetail used to make connections. 
*/ protected ConnectionURL _connectionDetails; public FailoverRoundRobinServers(ConnectionURL connectionDetails) { if (!(connectionDetails.getBrokerCount() > 0)) { throw new IllegalArgumentException("At least one broker details must be specified."); } _connectionDetails = connectionDetails; // There is no current broker at startup so set it to -1. _currentBrokerIndex = 0; String cycleRetries = _connectionDetails.getOption(ConnectionURL.OPTIONS_FAILOVER_CYCLE); _cycleRetries = DEFAULT_CYCLE_RETRIES; if (cycleRetries != null) { try { _cycleRetries = Integer.parseInt(cycleRetries); } catch (NumberFormatException nfe) { _logger.warn("Cannot set cycle Retries, " + cycleRetries + " is not a number. Using default: " + DEFAULT_CYCLE_RETRIES); } } _currentCycleRetries = 0; _serverRetries = 0; _currentServerRetry = 0; } public void reset() { _currentBrokerIndex = 0; _currentCycleRetries = 0; _currentServerRetry = 0; } public boolean failoverAllowed() { _logger.info("==== Checking failoverAllowed() ===="); _logger.info(toString()); _logger.info("===================================="); return ((_currentCycleRetries < _cycleRetries) || (_currentServerRetry < _serverRetries)); } public void attainedConnection() { _currentCycleRetries = 0; _currentServerRetry = 0; } public BrokerDetails getCurrentBrokerDetails() { return _connectionDetails.getBrokerDetails(_currentBrokerIndex); } public BrokerDetails getNextBrokerDetails() { boolean doDelay = false; if (_currentBrokerIndex == (_connectionDetails.getBrokerCount() - 1)) { if (_currentServerRetry < _serverRetries) { _logger.info("Trying " + _connectionDetails.getBrokerDetails(_currentBrokerIndex)); doDelay= _currentBrokerIndex != 0; _currentServerRetry++; } else { _currentCycleRetries++; // failed to connect to first broker _currentBrokerIndex = 0; setBroker(_connectionDetails.getBrokerDetails(_currentBrokerIndex)); // This is zero rather than -1 as we are already retrieving the details. 
_currentServerRetry = 0; } // else - should force client to stop as max retries has been reached. } else { if (_currentServerRetry < _serverRetries) { _logger.info("Trying " + _connectionDetails.getBrokerDetails(_currentBrokerIndex)); doDelay= _currentBrokerIndex != 0; _currentServerRetry++; } else { _currentBrokerIndex++; setBroker(_connectionDetails.getBrokerDetails(_currentBrokerIndex)); // This is zero rather than -1 as we are already retrieving the details. _currentServerRetry = 0; doDelay= _currentBrokerIndex != 0; } } BrokerDetails broker = _connectionDetails.getBrokerDetails(_currentBrokerIndex); String delayStr = broker.getProperty(BrokerDetails.OPTIONS_CONNECT_DELAY); if (delayStr != null && doDelay) { Long delay = Long.parseLong(delayStr); _logger.info("Delay between connect retries:" + delay); try { Thread.sleep(delay); } catch (InterruptedException ie) { return null; } } else { // Only display if option not set. Not if deDelay==false. if (delayStr == null) { _logger.info("No delay between connect retries, use tcp://host:port?connectdelay='value' to enable."); } } return broker; } public void setBroker(BrokerDetails broker) { _connectionDetails.addBrokerDetails(broker); int index = _connectionDetails.getAllBrokerDetails().indexOf(broker); String serverRetries = broker.getProperty(BrokerDetails.OPTIONS_RETRY); if (serverRetries != null) { try { _serverRetries = Integer.parseInt(serverRetries); } catch (NumberFormatException nfe) { _serverRetries = DEFAULT_SERVER_RETRIES; } } _currentServerRetry = 0; _currentBrokerIndex = index; } public void setRetries(int maxRetries) { _cycleRetries = maxRetries; } public String methodName() { return "Cycle Servers"; } public String toString() { StringBuffer sb = new StringBuffer(); sb.append("Cycle Servers:\n"); sb.append("Cycle Retries:"); sb.append(_cycleRetries); sb.append("\nCurrent Cycle:"); sb.append(_currentCycleRetries); sb.append("\nServer Retries:"); sb.append(_serverRetries); sb.append("\nCurrent Retry:"); 
sb.append(_currentServerRetry); sb.append("\nCurrent Broker:"); sb.append(_currentBrokerIndex); sb.append("\n"); for (int i = 0; i < _connectionDetails.getBrokerCount(); i++) { if (i == _currentBrokerIndex) { sb.append(">"); } sb.append(_connectionDetails.getBrokerDetails(i)); sb.append("\n"); } return sb.toString(); } }
/**
 * Copyright (C) FuseSource, Inc.
 * http://fusesource.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.fabric8.service;

import static io.fabric8.utils.Ports.mapPortToRange;

import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

import io.fabric8.api.ContainerAutoScaler;
import io.fabric8.api.ContainerAutoScalerFactory;
import io.fabric8.utils.PasswordEncoder;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.karaf.admin.management.AdminServiceMBean;
import io.fabric8.api.Constants;
import io.fabric8.api.Container;
import io.fabric8.api.ContainerProvider;
import io.fabric8.api.CreateChildContainerMetadata;
import io.fabric8.api.CreateChildContainerOptions;
import io.fabric8.api.CreateEnsembleOptions;
import io.fabric8.api.CreationStateListener;
import io.fabric8.api.DataStore;
import io.fabric8.api.FabricService;
import io.fabric8.api.PortService;
import io.fabric8.api.Profile;
import io.fabric8.api.jcip.ThreadSafe;
import io.fabric8.api.scr.AbstractComponent;
import io.fabric8.api.scr.ValidatingReference;
import io.fabric8.internal.ContainerImpl;
import io.fabric8.internal.ProfileOverlayImpl;
import io.fabric8.utils.AuthenticationUtils;
import io.fabric8.utils.Ports;
import io.fabric8.zookeeper.ZkDefs;

/**
 * {@link ContainerProvider} for the "child" scheme: creates, starts, stops and
 * destroys child Karaf instances under a parent container, delegating the actual
 * instance management to the parent's {@link AdminServiceMBean} via JMX.
 */
@ThreadSafe
@Component(name = "io.fabric8.container.provider.child", label = "Fabric8 Child Container Provider", immediate = true, metatype = false)
@Service(ContainerProvider.class)
public final class ChildContainerProvider extends AbstractComponent implements ContainerProvider<CreateChildContainerOptions, CreateChildContainerMetadata>, ContainerAutoScalerFactory {

    // URL scheme handled by this provider.
    private static final String SCHEME = "child";

    @Reference(referenceInterface = FabricService.class)
    private final ValidatingReference<FabricService> fabricService = new ValidatingReference<FabricService>();

    // SCR lifecycle: mark the component valid once activated.
    @Activate
    void activate() {
        activateComponent();
    }

    @Deactivate
    void deactivate() {
        deactivateComponent();
    }

    @Override
    public CreateChildContainerOptions.Builder newBuilder() {
        return CreateChildContainerOptions.builder();
    }

    /**
     * Creates a child container under the parent named in {@code options},
     * executing {@link #doCreate} through a JMX template bound to the parent.
     */
    @Override
    public CreateChildContainerMetadata create(final CreateChildContainerOptions options, final CreationStateListener listener) throws Exception {
        assertValid();
        final Container parent = fabricService.get().getContainer(options.getParent());
        ContainerTemplate containerTemplate = new ContainerTemplate(parent, options.getJmxUser(), options.getJmxPassword(), false);
        return containerTemplate.execute(new ContainerTemplate.AdminServiceCallback<CreateChildContainerMetadata>() {
            public CreateChildContainerMetadata doWithAdminService(AdminServiceMBean adminService) throws Exception {
                return doCreate(adminService, options, listener, parent);
            }
        });
    }

    /**
     * Builds the child's JVM options, registers its SSH/HTTP/RMI ports within the
     * parent's port range, then asks the admin service to create and start the
     * instance. Any failure during create/start is recorded on the returned
     * metadata rather than thrown.
     */
    private CreateChildContainerMetadata doCreate(AdminServiceMBean adminService, CreateChildContainerOptions options, CreationStateListener listener, final Container parent) throws Exception {
        StringBuilder jvmOptsBuilder = new StringBuilder();

        // Defaults to "true" via System.getProperty, so the null-check below can never trigger.
        String zkPasswordEncode = System.getProperty("zookeeper.password.encode", "true");
        jvmOptsBuilder.append("-server -Dcom.sun.management.jmxremote")
                .append(options.getZookeeperUrl() != null ? " -Dzookeeper.url=\"" + options.getZookeeperUrl() + "\"" : "")
                .append(zkPasswordEncode != null ? " -Dzookeeper.password.encode=\"" + zkPasswordEncode + "\"" : "")
                .append(options.getZookeeperPassword() != null ? " -Dzookeeper.password=\"" + options.getZookeeperPassword() + "\"" : "");

        // Default heap unless the caller supplied an -Xmx of their own.
        if (options.getJvmOpts() == null || !options.getJvmOpts().contains("-Xmx")) {
            jvmOptsBuilder.append(" -Xmx512m");
        }
        if (options.isEnsembleServer()) {
            jvmOptsBuilder.append(" ").append(CreateEnsembleOptions.ENSEMBLE_AUTOSTART+"=true");
        }

        if (options.getJvmOpts() != null && !options.getJvmOpts().isEmpty()) {
            jvmOptsBuilder.append(" ").append(options.getJvmOpts());
        }

        if (options.getJvmOpts() == null || !options.getJvmOpts().contains("-XX:+UnlockDiagnosticVMOptions -XX:+UnsyncloadClass")) {
            jvmOptsBuilder.append(" -XX:+UnlockDiagnosticVMOptions -XX:+UnsyncloadClass");
        }

        // Propagate networking hints to the child as system properties.
        if (options.getBindAddress() != null && !options.getBindAddress().isEmpty()) {
            jvmOptsBuilder.append(" -D" + ZkDefs.BIND_ADDRESS + "=" + options.getBindAddress());
        }

        if (options.getResolver() != null && !options.getResolver().isEmpty()) {
            jvmOptsBuilder.append(" -D" + ZkDefs.LOCAL_RESOLVER_PROPERTY + "=" + options.getResolver());
        }

        if (options.getManualIp() != null && !options.getManualIp().isEmpty()) {
            jvmOptsBuilder.append(" -D" + ZkDefs.MANUAL_IP + "=" + options.getManualIp());
        }

        FabricService fservice = fabricService.get();
        // NOTE(review): this map (including the DATASTORE_TYPE_PROPERTY entry added
        // below) is never read again — the loop iterates options.getDataStoreProperties()
        // directly, so the datastore type is never appended to the JVM opts. Confirm intent.
        Map<String, String> dataStoreProperties = new HashMap<String, String>(options.getDataStoreProperties());
        dataStoreProperties.put(DataStore.DATASTORE_TYPE_PROPERTY, fservice.getDataStore().getType());

        for (Map.Entry<String, String> dataStoreEntries : options.getDataStoreProperties().entrySet()) {
            String key = dataStoreEntries.getKey();
            String value = dataStoreEntries.getValue();
            jvmOptsBuilder.append(" -D" + Constants.DATASTORE_TYPE_PID +"." + key + "=" + value);
        }

        // Pull feature repositories from the overlaid "default" profile of the parent's version.
        Profile profile = parent.getVersion().getProfile("default");
        Profile defaultProfile = new ProfileOverlayImpl(profile, fservice.getEnvironment(), true, fservice);

        String featuresUrls = collectionAsString(defaultProfile.getRepositories());

        Set<String> features = new LinkedHashSet<String>();
        features.add("fabric-agent");
        features.add("fabric-git");
        //features.addAll(defaultProfile.getFeatures());
        String containerName = options.getName();

        PortService portService = fservice.getPortService();
        Set<Integer> usedPorts = portService.findUsedPortByHost(parent);

        CreateChildContainerMetadata metadata = new CreateChildContainerMetadata();

        metadata.setCreateOptions(options);
        metadata.setContainerName(containerName);

        // The child inherits the parent's allowed port range.
        int minimumPort = parent.getMinimumPort();
        int maximumPort = parent.getMaximumPort();

        fservice.getDataStore().setContainerAttribute(containerName, DataStore.ContainerAttribute.PortMin, String.valueOf(minimumPort));
        fservice.getDataStore().setContainerAttribute(containerName, DataStore.ContainerAttribute.PortMax, String.valueOf(maximumPort));
        inheritAddresses(fservice, parent.getId(), containerName, options);

        //We are creating a container instance, just for the needs of port registration.
        Container child = new ContainerImpl(parent, containerName, fservice) {
            @Override
            public String getIp() {
                return parent.getIp();
            }
        };

        // Map each default port into the parent's range, then reserve a free port
        // within [from, to] avoiding the ports already used on this host.
        int sshFrom = mapPortToRange(Ports.DEFAULT_KARAF_SSH_PORT , minimumPort, maximumPort);
        int sshTo = mapPortToRange(Ports.DEFAULT_KARAF_SSH_PORT + 100 , minimumPort, maximumPort);
        int sshPort = portService.registerPort(child, "org.apache.karaf.shell", "sshPort", sshFrom, sshTo, usedPorts);

        int httpFrom = mapPortToRange(Ports.DEFAULT_HTTP_PORT , minimumPort, maximumPort);
        int httpTo = mapPortToRange(Ports.DEFAULT_HTTP_PORT + 100 , minimumPort, maximumPort);
        portService.registerPort(child, "org.ops4j.pax.web", "org.osgi.service.http.port", httpFrom, httpTo, usedPorts);

        int rmiServerFrom = mapPortToRange(Ports.DEFAULT_RMI_SERVER_PORT , minimumPort, maximumPort);
        int rmiServerTo = mapPortToRange(Ports.DEFAULT_RMI_SERVER_PORT + 100 , minimumPort, maximumPort);
        int rmiServerPort = portService.registerPort(child, "org.apache.karaf.management", "rmiServerPort", rmiServerFrom, rmiServerTo, usedPorts);

        int rmiRegistryFrom = mapPortToRange(Ports.DEFAULT_RMI_REGISTRY_PORT , minimumPort, maximumPort);
        int rmiRegistryTo = mapPortToRange(Ports.DEFAULT_RMI_REGISTRY_PORT + 100 , minimumPort, maximumPort);
        int rmiRegistryPort = portService.registerPort(child, "org.apache.karaf.management", "rmiRegistryPort", rmiRegistryFrom, rmiRegistryTo, usedPorts);

        try {
            adminService.createInstance(containerName, sshPort, rmiRegistryPort, rmiServerPort, null, jvmOptsBuilder.toString(), collectionAsString(features), featuresUrls);
            adminService.startInstance(containerName, null);
        } catch (Throwable t) {
            // Creation failures are reported via the metadata, not rethrown.
            metadata.setFailure(t);
        }
        return metadata;
    }

    /** Starts an existing child instance via its parent's admin service. */
    @Override
    public void start(final Container container) {
        assertValid();
        getContainerTemplateForChild(container).execute(new ContainerTemplate.AdminServiceCallback<Object>() {
            public Object doWithAdminService(AdminServiceMBean adminService) throws Exception {
                adminService.startInstance(container.getId(), null);
                return null;
            }
        });
    }

    /** Stops a child instance via its parent's admin service. */
    @Override
    public void stop(final Container container) {
        assertValid();
        getContainerTemplateForChild(container).execute(new ContainerTemplate.AdminServiceCallback<Object>() {
            public Object doWithAdminService(AdminServiceMBean adminService) throws Exception {
                adminService.stopInstance(container.getId());
                return null;
            }
        });
    }

    /** Destroys a child instance via its parent's admin service. */
    @Override
    public void destroy(final Container container) {
        assertValid();
        getContainerTemplateForChild(container).execute(new ContainerTemplate.AdminServiceCallback<Object>() {
            public Object doWithAdminService(AdminServiceMBean adminService) throws Exception {
                adminService.destroyInstance(container.getId());
                return null;
            }
        });
    }

    @Override
    public String getScheme() {
        return SCHEME;
    }

    @Override
    public Class<CreateChildContainerOptions> getOptionsType() {
        return CreateChildContainerOptions.class;
    }

    @Override
    public Class<CreateChildContainerMetadata> getMetadataType() {
        return CreateChildContainerMetadata.class;
    }

    @Override
    public ContainerAutoScaler createAutoScaler() {
        return new ChildAutoScaler(this);
    }

    /**
     * Returns the {@link ContainerTemplate} of the parent of the specified child {@link Container}.
     */
    private ContainerTemplate getContainerTemplateForChild(Container container) {
        CreateChildContainerOptions options = (CreateChildContainerOptions) container.getMetadata().getCreateOptions();

        // Prefer the credentials of the current JAAS login, if any, over the stored ones.
        String username = AuthenticationUtils.retrieveJaasUser();
        String password = AuthenticationUtils.retrieveJaasPassword();

        if (username != null && password != null) {
            options = (CreateChildContainerOptions) options.updateCredentials(username, password);
        }

        return new ContainerTemplate(container.getParent(), options.getJmxUser(), options.getJmxPassword(), false);
    }

    /**
     * Links child container resolver and addresses to its parents resolver and addresses.
     */
    private void inheritAddresses(FabricService service, String parent, String name, CreateChildContainerOptions options) throws Exception {
        // Explicit values from the options win; otherwise point at the parent's
        // zookeeper entries via "${zk:...}" substitution.
        if (options.getManualIp() != null) {
            service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.ManualIp, options.getManualIp());
        } else {
            service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.ManualIp, "${zk:" + parent + "/manualip}");
        }

        //Link to the addresses from the parent container.
        service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.LocalHostName, "${zk:" + parent + "/localhostname}");
        service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.LocalIp, "${zk:" + parent + "/localip}");
        service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.PublicIp, "${zk:" + parent + "/publicip}");

        if (options.getResolver() != null) {
            service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.Resolver, options.getResolver());
        } else {
            service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.Resolver, "${zk:" + parent + "/resolver}");
        }

        if (options.getBindAddress() != null) {
            service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.BindAddress, options.getBindAddress());
        } else {
            service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.BindAddress, "${zk:" + parent + "/bindaddress}");
        }

        // The child's IP resolves through its own resolver attribute at lookup time.
        service.getDataStore().setContainerAttribute(name, DataStore.ContainerAttribute.Ip, "${zk:" + name + "/${zk:" + name + "/resolver}}");
    }

    FabricService getFabricService() {
        return fabricService.get();
    }

    /** Joins a collection with commas; returns the empty string for null/empty input. */
    private static String collectionAsString(Collection<String> value) {
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        if (value != null) {
            for (String el : value) {
                if (first) {
                    first = false;
                } else {
                    sb.append(",");
                }
                sb.append(el);
            }
        }
        return sb.toString();
    }

    // SCR bind/unbind methods for the FabricService reference.
    void bindFabricService(FabricService fabricService) {
        this.fabricService.bind(fabricService);
    }

    void unbindFabricService(FabricService fabricService) {
        this.fabricService.unbind(fabricService);
    }
}
package com.graphhopper.jsprit.core.reporting;

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import com.graphhopper.jsprit.core.problem.Location;
import com.graphhopper.jsprit.core.problem.SizeDimension;
import com.graphhopper.jsprit.core.problem.job.AbstractJob;
import com.graphhopper.jsprit.core.problem.solution.route.activity.AbstractActivity;
import com.graphhopper.jsprit.core.problem.solution.route.activity.End;
import com.graphhopper.jsprit.core.problem.solution.route.activity.JobActivity;
import com.graphhopper.jsprit.core.problem.solution.route.activity.Start;
import com.graphhopper.jsprit.core.problem.solution.route.activity.TimeWindow;
import com.graphhopper.jsprit.core.problem.solution.route.activity.TourActivity;

import hu.vissy.texttable.column.ColumnDefinition;
import hu.vissy.texttable.contentformatter.CellContentFormatter;
import hu.vissy.texttable.dataconverter.DataConverter;
import hu.vissy.texttable.dataconverter.NumberDataConverter;
import hu.vissy.texttable.dataextractor.StatefulDataExtractor;

/**
 * Configuration for the tabular "route details" report: selects which columns
 * appear and whether time-like values are rendered numerically, human-readably,
 * or both. Build instances via {@link Builder}; {@link #getColumns()} expands
 * the chosen {@link Column} constants into concrete column definitions.
 */
public class RouteDetailsConfig extends ColumnConfigBase {

    // Human-readable labels for job priorities 1..10 (index 0 unused).
    private static final String[] PRIORITY_NAMES = new String[] { "",
            /* 1 */ "highest",
            /* 2 */ "very high",
            /* 3 */ "high",
            /* 4 */ "above medium",
            /* 5 */ "medium",
            /* 6 */ "below medium",
            /* 7 */ "low",
            /* 8 */ "very low",
            /* 9 */ "extreme low",
            /* 10 */ "lowest", };

    // Per-table mutable state for the running-load column.
    private static class SizeDimensionAggregator {
        SizeDimension size;
    }

    // Per-table mutable state remembering the previous activity (for travel times/costs).
    private static class PrevActivityHolder {
        TourActivity prevAct;
    }

    // Per-table mutable state accumulating the route cost.
    private static class CostAggregator {
        int cost;
        TourActivity prevAct;
    }

    // Renders a SizeDimension as "[d0, d1, ...]"; null stays null (blank cell).
    private static final DataConverter<SizeDimension> SIZE_DIMENSION_CONVERTER = sd -> {
        if (sd != null)
            return IntStream.range(0, sd.getNuOfDimensions()).mapToObj(i -> "" + sd.get(i))
                    .collect(Collectors.joining(", ", "[", "]"));
        else
            return null;
    };

    /**
     * Controls which variant(s) of a dual-form column (numeric vs. human
     * readable) are emitted.
     */
    public enum DisplayMode {
        NUMERIC {
            @Override
            List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> pickColumns(
                    ColumnDefinition<RouteDeatailsRecord, ?, ?> numeric,
                    ColumnDefinition<RouteDeatailsRecord, ?, ?> human) {
                return Collections.singletonList(numeric);
            }
        },
        HUMAN {
            @Override
            List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> pickColumns(
                    ColumnDefinition<RouteDeatailsRecord, ?, ?> numeric,
                    ColumnDefinition<RouteDeatailsRecord, ?, ?> human) {
                return Collections.singletonList(human);
            }
        },
        BOTH {
            @Override
            List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> pickColumns(
                    ColumnDefinition<RouteDeatailsRecord, ?, ?> numeric,
                    ColumnDefinition<RouteDeatailsRecord, ?, ?> human) {
                List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> res = new ArrayList<>();
                res.add(numeric);
                res.add(human);
                return res;
            }
        };

        abstract List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> pickColumns(
                ColumnDefinition<RouteDeatailsRecord, ?, ?> numeric,
                ColumnDefinition<RouteDeatailsRecord, ?, ?> human);
    }

    /**
     * The available report columns. Each constant knows how to create its own
     * concrete column definition(s) against a {@link RouteDetailsConfig}.
     */
    public enum Column {
        ROUTE_NUMBER {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, Integer>()
                                .withTitle("route")
                                .withDataExtractor(r -> r.getRoute().getId())
                                .withCellContentFormatter(CellContentFormatter.rightAlignedCell())
                                .withDataConverter(NumberDataConverter.defaultIntegerFormatter())
                                .build());
            }
        },
        VEHICLE_NAME {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, String>()
                                .withTitle("vehicle")
                                .withDataExtractor(r -> r.getRoute().getVehicle().getId()).build());
            }
        },
        ACTIVITY_TYPE {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, String>()
                                .withTitle("activity")
                                .withDataExtractor(
                                        r -> ((AbstractActivity) r.getActivity()).getType())
                                .build());
            }
        },
        JOB_NAME {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, String>()
                                .withTitle("job name").withDataExtractor(r -> {
                                    // Start/End records carry no job.
                                    AbstractJob job = r.getJob();
                                    return job == null ? null : job.getId();
                                })
                                .build());
            }
        },
        JOB_TYPE {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, String>()
                                .withTitle("job type").withDataExtractor(r -> {
                                    AbstractJob job = r.getJob();
                                    return job == null ? null : job.getClass().getSimpleName();
                                }).build());
            }
        },
        JOB_PRIORITY {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                Function<RouteDeatailsRecord, Integer> dataExtractorCallback = r -> {
                    AbstractJob job = r.getJob();
                    return job == null ? null : job.getPriority();
                };
                return routeDetailsConfig.displayMode.pickColumns(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, Integer>()
                                .withTitle("priority")
                                .withCellContentFormatter(CellContentFormatter.centeredCell())
                                .withDataExtractor(dataExtractorCallback)
                                .build(),
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, Integer>()
                                .withTitle("priority (HR)")
                                .withCellContentFormatter(CellContentFormatter.centeredCell())
                                .withDataConverter(data -> {
                                    if (data == null) {
                                        return "";
                                    }
                                    // Guard against priorities outside 0..10 so a bad
                                    // value cannot crash report generation.
                                    String label = (data >= 0 && data < PRIORITY_NAMES.length)
                                            ? PRIORITY_NAMES[data]
                                            : "unknown";
                                    return label + "(" + data + ")";
                                })
                                .withDataExtractor(dataExtractorCallback).build()
                        );
            }
        },
        LOCATION {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, String>()
                                .withTitle("location").withDataExtractor(r -> {
                                    TourActivity act = r.getActivity();
                                    Location loc = act.getLocation();
                                    return loc == null ? null : loc.getId();
                                }).build());
            }
        },
        LOAD_CHANGE {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, SizeDimension>()
                                .withTitle("load change")
                                .withDataConverter(SIZE_DIMENSION_CONVERTER)
                                .withDataExtractor(r -> {
                                    TourActivity act = r.getActivity();
                                    // The Start row shows the initial load instead of a delta.
                                    if (act instanceof Start)
                                        return r.calculateInitialLoad();
                                    else
                                        return act.getLoadChange();
                                }).build());
            }
        },
        ROUTE_LOAD {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatefulBuilder<RouteDeatailsRecord, SizeDimensionAggregator, SizeDimension>()
                                .withTitle("load").withDataConverter(SIZE_DIMENSION_CONVERTER)
                                .withDataExtractor(new StatefulDataExtractor<>((r, s) -> {
                                    TourActivity act = r.getActivity();
                                    // Reset the running load at each route Start, then
                                    // accumulate activity deltas.
                                    if (act instanceof Start) {
                                        s.size = r.calculateInitialLoad();
                                    } else {
                                        s.size = s.size.add(act.getLoadChange());
                                    }
                                    return s.size;
                                }, SizeDimensionAggregator::new, (k, s) -> null)).build());
            }
        },
        TIME_WINDOWS {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                Function<RouteDeatailsRecord, Collection<TimeWindow>> dataExtractorCallback = r -> {
                    TourActivity act = r.getActivity();
                    if (act instanceof JobActivity)
                        return ((JobActivity) act).getTimeWindows();
                    else
                        return null;
                };
                return routeDetailsConfig.displayMode.pickColumns(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, Collection<TimeWindow>>()
                                .withTitle("time windows").withDataConverter(
                                        tws -> routeDetailsConfig.formatTimeWindowsNumeric(tws))
                                .withDataExtractor(dataExtractorCallback).build(),
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, Collection<TimeWindow>>()
                                .withTitle("time windows (HR)").withDataConverter(
                                        tws -> routeDetailsConfig.formatTimeWindowsHuman(tws))
                                .withDataExtractor(dataExtractorCallback).build()
                        );
            }
        },
        OPERATION_DURATION {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return createTimeColumns(routeDetailsConfig, "opTime",
                        routeDetailsConfig.getDurationFormatter(), r -> {
                            TourActivity act = r.getActivity();
                            return (long) act.getOperationTime();
                        });
            }
        },
        TRAVEL_DURATION {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return createStatefulDurationColumns(routeDetailsConfig, "travel",
                        new StatefulDataExtractor<RouteDeatailsRecord, PrevActivityHolder, Long>(
                                (r, s) -> {
                                    TourActivity act = r.getActivity();
                                    // No "previous" activity at the start of a route.
                                    if (act instanceof Start) {
                                        s.prevAct = null;
                                    }
                                    long val = (long) (r.getTransportTime(s.prevAct));
                                    s.prevAct = act;
                                    return val;
                                }, PrevActivityHolder::new, (k, s) -> null));
            }
        },
        WAITING {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                // Waiting time = end - operation - arrival; undefined for Start/End rows.
                return createTimeColumns(routeDetailsConfig, "waiting",
                        routeDetailsConfig.getDurationFormatter(), (r) -> {
                            TourActivity act = r.getActivity();
                            if (act instanceof Start || act instanceof End)
                                return null;
                            else
                                return (long) (act.getEndTime() - act.getOperationTime()
                                        - act.getArrTime());
                        });
            }
        },
        ACTIVITY_DURATION {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return createStatefulDurationColumns(routeDetailsConfig, "duration",
                        new StatefulDataExtractor<RouteDeatailsRecord, PrevActivityHolder, Long>(
                                (r, s) -> {
                                    TourActivity act = r.getActivity();
                                    if (act instanceof Start) {
                                        s.prevAct = null;
                                    }
                                    long val = (long) (r.getTransportTime(s.prevAct)
                                            + act.getOperationTime());
                                    s.prevAct = act;
                                    return val;
                                }, PrevActivityHolder::new, (k, s) -> null));
            }
        },
        ARRIVAL_TIME {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return createTimeColumns(routeDetailsConfig, "arrival",
                        routeDetailsConfig.getTimeFormatter(), r -> {
                            TourActivity act = r.getActivity();
                            if (act instanceof Start)
                                return null;
                            else
                                return (long) act.getArrTime();
                        });
            }
        },
        START_TIME {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return createTimeColumns(routeDetailsConfig, "start",
                        routeDetailsConfig.getTimeFormatter(), r -> {
                            TourActivity act = r.getActivity();
                            if (act instanceof End)
                                return null;
                            else
                                return (long) (act.getEndTime() - act.getOperationTime());
                        });
            }
        },
        END_TIME {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return createTimeColumns(routeDetailsConfig, "end",
                        routeDetailsConfig.getTimeFormatter(), r -> {
                            TourActivity act = r.getActivity();
                            if (act instanceof End)
                                return null;
                            else
                                return (long) act.getEndTime();
                        });
            }
        },
        SELECTED_TIME_WINDOW {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                // The time window actually used is the one containing the
                // activity's start time (end - operation).
                Function<RouteDeatailsRecord, TimeWindow> dataExtractorCallback = r -> {
                    TourActivity act = r.getActivity();
                    if (act instanceof JobActivity) {
                        Optional<TimeWindow> optTw = ((JobActivity) act)
                                .getTimeWindows().stream()
                                .filter(tw -> tw.contains(
                                        act.getEndTime() - act.getOperationTime()))
                                .findAny();
                        if (optTw.isPresent())
                            return optTw.get();
                        else
                            return null;
                    } else
                        return null;
                };
                return routeDetailsConfig.displayMode.pickColumns(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, TimeWindow>()
                                .withTitle("selected tw")
                                .withDataConverter(
                                        tw -> routeDetailsConfig.formatTimeWindowNumeric(tw))
                                .withDataExtractor(dataExtractorCallback).build(),
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, TimeWindow>()
                                .withTitle("selected tw (HR)")
                                .withDataConverter(
                                        tw -> routeDetailsConfig.formatTimeWindowHuman(tw))
                                .withDataExtractor(dataExtractorCallback).build());
            }
        },
        TRANSPORT_COST {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return createStatefulCostColumns(routeDetailsConfig, "transCost",
                        new StatefulDataExtractor<RouteDeatailsRecord, PrevActivityHolder, Integer>(
                                (r, s) -> {
                                    TourActivity act = r.getActivity();
                                    if (act instanceof Start) {
                                        s.prevAct = null;
                                    }
                                    double res = r.getTransportCost(s.prevAct);
                                    s.prevAct = act;
                                    return (int) res;
                                }, PrevActivityHolder::new, (k, s) -> null));
            }
        },
        ACTIVITY_COST {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, Integer>()
                                .withTitle("actCost")
                                .withDataExtractor(r -> (int) r.getActivityCost()).build());
            }
        },
        ROUTE_COST {
            @Override
            public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                    RouteDetailsConfig routeDetailsConfig) {
                return Collections.singletonList(
                        new ColumnDefinition.StatefulBuilder<RouteDeatailsRecord, CostAggregator, Integer>()
                                .withTitle("routeCost")
                                .withCellContentFormatter(CellContentFormatter.rightAlignedCell())
                                .withDataExtractor(new StatefulDataExtractor<RouteDeatailsRecord, CostAggregator, Integer>(
                                        (r, s) -> {
                                            TourActivity act = r.getActivity();
                                            // Reset the accumulator at each route Start.
                                            if (act instanceof Start) {
                                                s.prevAct = null;
                                                s.cost = 0;
                                            }
                                            Double trCost = r.getTransportCost(s.prevAct);
                                            s.prevAct = act;
                                            if (trCost != null) {
                                                s.cost += trCost;
                                            }
                                            s.cost += r.getActivityCost();
                                            return s.cost;
                                        }, CostAggregator::new, (k, s) -> null)
                                        ).build());
            }
        },
        ;

        /** Creates the concrete column definition(s) this constant stands for. */
        public abstract List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createColumns(
                RouteDetailsConfig routeDetailsConfig);

        // Builds a numeric/human column pair for a stateless time value and lets
        // the display mode pick which variant(s) survive.
        private static List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createTimeColumns(
                RouteDetailsConfig routeDetailsConfig, String title, DataConverter<Long> converter,
                Function<RouteDeatailsRecord, Long> getter) {
            return routeDetailsConfig.displayMode.pickColumns(
                    new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, Long>()
                            .withTitle(title).withDataExtractor(getter)
                            .withCellContentFormatter(CellContentFormatter.rightAlignedCell())
                            .build(),
                    new ColumnDefinition.StatelessBuilder<RouteDeatailsRecord, Long>()
                            .withTitle(title + " (HR)")
                            .withCellContentFormatter(CellContentFormatter.rightAlignedCell())
                            .withDataConverter(converter)
                            .withDataExtractor(getter).build());
        }

        // Same as createTimeColumns but for stateful (previous-activity-aware) durations.
        private static List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createStatefulDurationColumns(
                RouteDetailsConfig routeDetailsConfig, String title,
                StatefulDataExtractor<RouteDeatailsRecord, PrevActivityHolder, Long> getter) {
            return routeDetailsConfig.displayMode.pickColumns(
                    new ColumnDefinition.StatefulBuilder<RouteDeatailsRecord, PrevActivityHolder, Long>()
                            .withTitle(title)
                            .withCellContentFormatter(CellContentFormatter.rightAlignedCell())
                            .withDataExtractor(getter)
                            .build(),
                    new ColumnDefinition.StatefulBuilder<RouteDeatailsRecord, PrevActivityHolder, Long>()
                            .withTitle(title + " (HR)")
                            .withCellContentFormatter(CellContentFormatter.rightAlignedCell())
                            .withDataConverter(dur -> routeDetailsConfig.formatDurationHuman(dur))
                            .withDataExtractor(getter)
                            .build());
        }

        // Cost columns have no human-readable form, so the display mode is not consulted.
        private static List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> createStatefulCostColumns(
                RouteDetailsConfig routeDetailsConfig, String title,
                StatefulDataExtractor<RouteDeatailsRecord, PrevActivityHolder, Integer> getter) {
            return Collections.singletonList(
                    new ColumnDefinition.StatefulBuilder<RouteDeatailsRecord, PrevActivityHolder, Integer>()
                            .withTitle(title)
                            .withCellContentFormatter(CellContentFormatter.rightAlignedCell())
                            .withDataExtractor(getter).build());
        }
    }

    /**
     * Fluent builder for {@link RouteDetailsConfig}. Defaults: numeric display
     * mode, today's midnight as the human-readable time origin, second/hour as
     * the low/high duration units, no columns.
     */
    public static class Builder {
        private LocalDateTime humanReadableOrigin = LocalDateTime.of(LocalDate.now(),
                LocalTime.MIDNIGHT);
        private DisplayMode displayMode = DisplayMode.NUMERIC;
        private List<Column> columns;
        private ChronoUnit lowUnit = ChronoUnit.SECONDS;
        private ChronoUnit highUnit = ChronoUnit.HOURS;

        public Builder() {
            this.columns = new ArrayList<>();
        }

        /** Sets the wall-clock instant that numeric time 0 maps to in HR output. */
        public Builder withHumanReadableOrigin(LocalDateTime humanReadableOrigin) {
            this.humanReadableOrigin = humanReadableOrigin;
            return this;
        }

        /** Chooses numeric, human-readable, or both variants for dual columns. */
        public Builder withTimeDisplayMode(DisplayMode displayMode) {
            this.displayMode = displayMode;
            return this;
        }

        /** Sets the smallest time unit used when formatting durations. */
        public Builder withLowUnit(ChronoUnit lowUnit) {
            this.lowUnit = lowUnit;
            return this;
        }

        /** Sets the largest time unit used when formatting durations. */
        public Builder withHighUnit(ChronoUnit highUnit) {
            this.highUnit = highUnit;
            return this;
        }

        /** Appends one column to the report (order of calls is preserved). */
        public Builder withColumn(Column columns) {
            this.columns.add(columns);
            return this;
        }

        /** Appends several columns in the given order. */
        public Builder withColumns(Column... columns) {
            for (Column c : columns) {
                withColumn(c);
            }
            return this;
        }

        public RouteDetailsConfig build() {
            return new RouteDetailsConfig(this);
        }
    }

    private DisplayMode displayMode;
    private List<Column> columns;

    private RouteDetailsConfig(Builder builder) {
        this.displayMode = builder.displayMode;
        this.columns = builder.columns;
        setTimeFormatter(
                new HumanReadableTimeFormatter(builder.humanReadableOrigin, builder.lowUnit));
        setDurationFormatter(new HumanReadableDurationFormatter(builder.lowUnit, builder.highUnit));
    }

    public DisplayMode getDisplayMode() {
        return displayMode;
    }

    /**
     * Expands the configured {@link Column} constants into concrete column
     * definitions, honoring the display mode for dual-form columns.
     */
    public List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> getColumns() {
        List<ColumnDefinition<RouteDeatailsRecord, ?, ?>> columns = new ArrayList<>();
        this.columns.forEach(c -> columns.addAll(c.createColumns(this)));
        return columns;
    }
}
/* * Copyright 2012 International Business Machines Corp. * * See the NOTICE file distributed with this work for additional information * regarding copyright ownership. Licensed under the Apache License, * Version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ibm.jbatch.container.jsl.impl; import java.io.BufferedReader; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.StringReader; import java.net.URL; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.HashMap; import java.util.Properties; import javax.xml.bind.JAXBContext; import javax.xml.bind.JAXBElement; import javax.xml.bind.JAXBException; import javax.xml.bind.Unmarshaller; import javax.xml.transform.stream.StreamSource; import com.ibm.jbatch.container.jsl.ModelResolver; import com.ibm.jbatch.jsl.model.JSLJob; import com.ibm.jbatch.jsl.model.Step; import com.ibm.jbatch.jsl.util.ValidatorHelper; import com.ibm.jbatch.jsl.util.JSLValidationEventHandler; public class JobModelResolverImpl implements ModelResolver<JSLJob> { public JobModelResolverImpl() { super(); } private JSLJob unmarshalJobXML(String jobXML) { Object result = null; JSLJob job = null; JSLValidationEventHandler handler = new JSLValidationEventHandler(); try { JAXBContext ctx = JAXBContext.newInstance("com.ibm.jbatch.jsl.model"); Unmarshaller u = ctx.createUnmarshaller(); u.setSchema(ValidatorHelper.getXJCLSchema()); u.setEventHandler(handler); 
// Use this for anonymous type //job = (Job)u.unmarshal(new StreamSource(new StringReader(jobXML))); // Use this for named complex type result = u.unmarshal(new StreamSource(new StringReader(jobXML))); } catch (JAXBException e) { throw new IllegalArgumentException("Exception unmarshalling jobXML", e); } if (handler.eventOccurred()) { throw new IllegalArgumentException("xJCL invalid per schema, see SysOut for now for details"); } job = ((JAXBElement<JSLJob>)result).getValue(); return job; } private JSLJob getJslJobInheritance(String jobId) throws IOException { JSLJob jslJob = null; InputStream indexFileUrl = JobModelResolverImpl.class.getResourceAsStream("/META-INF/jobinheritance"); if (indexFileUrl != null) { Properties index = new Properties(); index.load(indexFileUrl); if (index.getProperty(jobId) != null) { URL parentUrl = JobModelResolverImpl.class.getResource(index.getProperty(jobId)); String parentXml = readJobXML(parentUrl.getFile()); jslJob = resolveModel(parentXml); } } return jslJob; } private String readJobXML(String fileWithPath) throws FileNotFoundException, IOException { StringBuffer jobXMLBuffer = ( fileWithPath==null ? null : new StringBuffer() ); if ( !(fileWithPath==null) ) { BufferedReader zin = new BufferedReader( new FileReader( new File(fileWithPath))); String input = zin.readLine(); do { if (input != null) { //jobXMLBuffer.append( input.trim() ); jobXMLBuffer.append(input); input = zin.readLine(); } } while (input!=null); } return ( jobXMLBuffer==null ? 
null : jobXMLBuffer.toString() ); } @Override public JSLJob resolveModel(String jobXML) { final String finalJobXML = jobXML; JSLJob jslJob = AccessController.doPrivileged( new PrivilegedAction<JSLJob>() { public JSLJob run() { return unmarshalJobXML(finalJobXML); } }); return jslJob; } // FIXME These maps need to move to the xJCL Repository private static HashMap<String, JSLJob> jobid2InstanceMap = new HashMap<String, JSLJob>(); private static HashMap<String, Step> stepid2InstanceMap = new HashMap<String, Step>(); @Override public JSLJob resolveModel(JSLJob t) { // TODO Auto-generated method stub // was this intended for inheritance? return null; } // FIXME HashMap<String, Split> splitid2InstanceMap = new HashMap<String,Split>(); // FIXME HashMap<String, Flow> flowid2InstanceMap = new HashMap<String,Flow>(); // // This is where we will implement job/step inheritance, though we don't at // the moment. // /* public static ResolvedJob resolveJob(Job job) { ArrayList<ResolvedStep> steps = new ArrayList<ResolvedStep>(); ArrayList<ResolvedDecision> decisions = new ArrayList<ResolvedDecision>(); ArrayList<ResolvedSplit> splits = new ArrayList<ResolvedSplit>(); ArrayList<ResolvedFlow> flows = new ArrayList<ResolvedFlow>(); ResolvedJob resolvedJob = new ResolvedJob(job.getId(), steps, decisions, splits, flows); for (Object next : job.getControlElements()) { if (next instanceof Step) { steps.add(new ResolvedStep(resolvedJob, (Step) next)); } else if (next instanceof Decision) { decisions.add(new ResolvedDecision(resolvedJob, (Decision) next)); } else if (next instanceof Split) { splits.add(new ResolvedSplit(resolvedJob, (Split) next)); } else if (next instanceof Flow) { flows.add(new ResolvedFlow(resolvedJob, (Flow) next)); } } return resolvedJob; } //FIXME We started implementing job inheritance here. Set to private so no one uses this yet. 
private static ResolvedJob resolveModel(Job leafJob) { String parentID = leafJob.getParent(); Job resolvedJob = resolveModel(leafJob, parentID); // FIXME you need to create a new ResolvedJob here. return null; } private static Job resolveModel(Job leafJob, String parentID) { if (!parentID.equals("")) { Job parentJob = jobid2InstanceMap.get(parentID); if (parentJob == null) { throw new BatchContainerRuntimeException(new IllegalArgumentException(), "The parent job id '" + parentID + "' on Job id '" + leafJob.getParent() + " cannot be found"); } // add all the attributes, steps, flows, and splits from the parent // to child if they don't exist on child leafJob.getControlElements().addAll(parentJob.getControlElements()); return resolveModel(leafJob, parentJob.getParent()); } for (Object next : leafJob.getControlElements()) { if (next instanceof Step) { resolveModel((Step)next); } else if (next instanceof Split) { //resolveModel((Split)next); } else if (next instanceof Flow) { //resolveModel((Flow)next); } } return leafJob; } //FIXME Set to private so no one uses this yet. private static ResolvedStep resolveModel(Step leafStep) { String parentID = leafStep.getParent(); Step resolvedStep = resolveModel(leafStep, parentID); // FIXME you need to clone the step to a resolved step return null; } private static Step resolveModel(Step leafStep, String parentID) { if (!parentID.equals("")) { Step parentStep = stepid2InstanceMap.get(parentID); if (parentStep == null) { throw new BatchContainerRuntimeException(new IllegalArgumentException(), "The parent step id '" + parentID + "' on Step id '" + leafStep.getParent() + " cannot be found"); } // add all the attributes, batchlets, chunks...etc from a parent // step if they don't // exist on the child step // leafStep.getXXX().addAll(parentStep.getXXX()); return resolveModel(leafStep, parentStep.getParent()); } // batchlet // // resolve chunks // next, startlimit ...etc // FIXME ... return leafStep; } */ }
package pl.poznan.put.promethee.xmcda;

import org.xmcda.*;
import pl.poznan.put.promethee.exceptions.InputDataException;

import java.util.*;
import java.util.stream.Collectors;

/**
 * Created by Maciej Uniejewski on 2016-12-26.
 *
 * Validates and extracts the alternatives, categories, categories ranking and
 * assignments from an XMCDA container into a plain {@link Inputs} holder.
 * Every validation failure is both recorded on the execution-result object and
 * signalled via {@link InputDataException}.
 */
public class InputsHandler {

    private static final String ALTERNATIVES_TAG_ERROR =
            "There is a problem with an assignments list - it cannot be read.";

    /** Utility class — not instantiable. */
    private InputsHandler() {
    }

    /** Plain holder for the validated program inputs. */
    public static class Inputs {

        // ids of active alternatives, in input order
        private List<String> alternativesIds;
        // ids of active categories, sorted by rank after sortCategories()
        private List<String> categoriesIds;
        // "direct" or "indirect", decided while reading assignments
        private String assignmentType;
        // alternativeId -> {"LOWER": categoryId, "UPPER": categoryId}
        private Map<String, Map<String, String>> assignments;
        // categoryId -> rank (1 = best, contiguous up to #categories)
        private Map<String, Integer> categoriesRanking;

        public List<String> getAlternativesIds() {
            return alternativesIds;
        }

        public void setAlternativesIds(List<String> alternativesIds) {
            this.alternativesIds = alternativesIds;
        }

        public List<String> getCategoriesIds() {
            return categoriesIds;
        }

        public void setCategoriesIds(List<String> categoriesIds) {
            this.categoriesIds = categoriesIds;
        }

        public String getAssignmentType() {
            return assignmentType;
        }

        public void setAssignmentType(String assignmentType) {
            this.assignmentType = assignmentType;
        }

        public Map<String, Map<String, String>> getAssignments() {
            return assignments;
        }

        public void setAssignments(Map<String, Map<String, String>> assignments) {
            this.assignments = assignments;
        }

        public Map<String, Integer> getCategoriesRanking() {
            return categoriesRanking;
        }

        public void setCategoriesRanking(Map<String, Integer> categoriesRanking) {
            this.categoriesRanking = categoriesRanking;
        }
    }

    /**
     * Runs the full validation pipeline and returns the extracted inputs, or
     * {@code null} when any error was recorded.
     */
    public static Inputs checkAndExtractInputs(XMCDA xmcda, ProgramExecutionResult xmcdaExecResults) {
        Inputs inputsDict = checkInputs(xmcda, xmcdaExecResults);
        if (xmcdaExecResults.isError())
            return null;
        return inputsDict;
    }

    /**
     * Executes each extraction step in order; the first failing step throws
     * {@link InputDataException}, which short-circuits the remaining steps
     * (the error itself is already recorded on {@code errors}).
     */
    protected static Inputs checkInputs(XMCDA xmcda, ProgramExecutionResult errors) {
        Inputs inputs = new Inputs();
        try {
            checkAndExtractAlternatives(inputs, xmcda, errors);
            checkAndExtractCategories(inputs, xmcda, errors);
            checkCategoriesRanking(inputs, xmcda, errors);
            sortCategories(inputs);
            checkAndExtractAssignments(inputs, xmcda, errors);
        } catch (InputDataException exception) {
            //Just catch the exceptions and skip other functions
        }
        return inputs;
    }

    /**
     * Extracts the ids of active alternatives carrying the "alternatives" marker.
     *
     * @throws InputDataException if no alternatives list was supplied or it is empty
     */
    protected static void checkAndExtractAlternatives(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors)
            throws InputDataException {
        if (xmcda.alternatives.isEmpty()) {
            String errorMessage = "No alternatives list has been supplied";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        List<String> alternativesIds = xmcda.alternatives.getActiveAlternatives().stream()
                .filter(a -> "alternatives".equals(a.getMarker()))
                .map(Alternative::id)
                .collect(Collectors.toList());
        if (alternativesIds.isEmpty()) {
            String errorMessage = "The alternatives list can not be empty";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        inputs.alternativesIds = alternativesIds;
    }

    /**
     * Extracts the ids of active categories carrying the "categories" marker.
     * At least two categories are required for a meaningful sorting problem.
     *
     * @throws InputDataException if categories are missing, fewer than two, or empty after filtering
     */
    protected static void checkAndExtractCategories(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors)
            throws InputDataException {
        if (xmcda.categories.isEmpty()) {
            String errorMessage = "No categories has been supplied.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        if (xmcda.categories.size() == 1) {
            String errorMessage = "You should supply at least 2 categories.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        List<String> categories = xmcda.categories.getActiveCategories().stream()
                .filter(a -> "categories".equals(a.getMarker()))
                .map(Category::id)
                .collect(Collectors.toList());
        inputs.setCategoriesIds(categories);
        if (categories.isEmpty()) {
            String errorMessage = "The category list can not be empty.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
    }

    /**
     * Validates that exactly one numeric categories-values list is supplied,
     * then extracts the ranks and checks them for duplicates.
     *
     * @throws InputDataException on any violation (missing, multiple, or non-numeric list,
     *         bad rank bounds, or duplicate ranks)
     */
    protected static void checkCategoriesRanking(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors)
            throws InputDataException {
        if (xmcda.categoriesValuesList.isEmpty()) {
            String errorMessage = "No categories values list has been supplied";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        if (xmcda.categoriesValuesList.size() > 1) {
            String errorMessage = "More than one categories values list has been supplied";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        CategoriesValues categoriesValuesList = xmcda.categoriesValuesList.get(0);
        if (!categoriesValuesList.isNumeric()) {
            String errorMessage = "Each of the categories ranks must be integer";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        // LinkedHashMap preserves the ranks' input order for later reporting.
        Map<String, Integer> categoriesValues = new LinkedHashMap<>();
        checkRanks(categoriesValuesList, categoriesValues, inputs, xmcda, errors);
        findRankingDuplicates(categoriesValues, errors);
    }

    /**
     * Converts the ranks to integers, verifies they span exactly 1..#categories,
     * and stores them on {@code inputs}. Populates {@code categoriesValues} with
     * categoryId -> rank as a side effect.
     *
     * @throws InputDataException if ranks are non-integer or out of bounds
     */
    public static void checkRanks(CategoriesValues categoriesValuesList, Map<String, Integer> categoriesValues,
                                  Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors)
            throws InputDataException {
        try {
            CategoriesValues<Integer> categoriesValuesClass = categoriesValuesList.convertTo(Integer.class);
            xmcda.categoriesValuesList.set(0, categoriesValuesClass);
            int min = Integer.MAX_VALUE;
            int max = -1;
            for (Map.Entry<Category, LabelledQValues<Integer>> a : categoriesValuesClass.entrySet()) {
                if (a.getValue().get(0).getValue() < min) {
                    min = a.getValue().get(0).getValue();
                }
                if (a.getValue().get(0).getValue() > max) {
                    max = a.getValue().get(0).getValue();
                }
                categoriesValues.put(a.getKey().id(), a.getValue().get(0).getValue());
            }
            if (min != 1) {
                String errorMessage = "Minimal rank should be equal to 1.";
                errors.addError(errorMessage);
                throw new InputDataException(errorMessage);
            }
            if (max != inputs.categoriesIds.size()) {
                String errorMessage = "Maximal rank should be equal to number of categories.";
                errors.addError(errorMessage);
                throw new InputDataException(errorMessage);
            }
            inputs.setCategoriesRanking(categoriesValues);
        } catch (InputDataException e) {
            // Re-throw our own validation failures untouched.
            throw e;
        } catch (Exception e) {
            // convertTo / value access can fail in many ways — collapse to one message.
            String errorMessage =
                    "An error occurred while checking the categories rank. Remember that each rank has to be integer.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
    }

    /**
     * Fails when two distinct categories share the same rank.
     *
     * @throws InputDataException on the first duplicate rank found
     */
    public static void findRankingDuplicates(Map<String, Integer> categoriesValues, ProgramExecutionResult errors)
            throws InputDataException {
        // FIX: single pass with a seen-set replaces the previous O(n^2) pairwise scan.
        Set<Integer> seenRanks = new HashSet<>();
        for (Integer rank : categoriesValues.values()) {
            if (!seenRanks.add(rank)) {
                String errorMessage = "There can not be two categories with the same rank.";
                errors.addError(errorMessage);
                throw new InputDataException(errorMessage);
            }
        }
    }

    /** Sorts the category ids by ascending rank (no-op when no ranking was extracted). */
    protected static void sortCategories(Inputs inputs) {
        if (inputs.getCategoriesRanking() == null) {
            return;
        }
        // FIX: comparingInt avoids the overflow-prone "a - b" comparator idiom.
        inputs.categoriesIds.sort(Comparator.comparingInt(inputs.categoriesRanking::get));
    }

    /**
     * Extracts the single assignments list, dispatching each assignment to the
     * direct (single category) or indirect (category interval) handler, then
     * verifies every alternative has an assignment.
     *
     * @throws InputDataException on any structural problem in the assignments
     */
    protected static void checkAndExtractAssignments(Inputs inputs, XMCDA xmcda, ProgramExecutionResult errors)
            throws InputDataException {
        if (xmcda.alternativesAssignmentsList.size() != 1) {
            String errorMessage = "You need to provide one list of alternatives assignments.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        if (xmcda.alternativesAssignmentsList.get(0).isEmpty()) {
            String errorMessage = "Assignments list can not be empty.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }

        Set<String> categoriesSet = new HashSet<>(inputs.getCategoriesIds());

        inputs.assignments = new HashMap<>();
        for (AlternativeAssignment<?> assignment : xmcda.alternativesAssignmentsList.get(0)) {
            Map<String, String> assignmentMap = new HashMap<>();
            if (assignment == null) {
                errors.addError(ALTERNATIVES_TAG_ERROR);
                throw new InputDataException(ALTERNATIVES_TAG_ERROR);
            }
            String alternativeId = assignment.getAlternative().id();
            if (assignment.getCategory() == null) {
                checkAndExtractIndirectInAssignment(assignment, categoriesSet, assignmentMap, inputs, errors);
            } else {
                checkAndExtractDirectInAssignment(assignment, categoriesSet, assignmentMap, inputs, errors);
            }
            inputs.assignments.put(alternativeId, assignmentMap);
        }
        checkAlternativesInAssignments(inputs, errors);
    }

    /**
     * Handles an interval assignment: both bounds must exist, be known categories,
     * and be rank-ordered (lower no worse than upper).
     */
    protected static void checkAndExtractIndirectInAssignment(AlternativeAssignment assignment,
                                                              Set<String> categoriesSet,
                                                              Map<String, String> assignmentMap, Inputs inputs,
                                                              ProgramExecutionResult errors)
            throws InputDataException {
        if (assignment.getCategoryInterval() == null
                || assignment.getCategoryInterval().getLowerBound() == null
                || assignment.getCategoryInterval().getUpperBound() == null) {
            errors.addError(ALTERNATIVES_TAG_ERROR);
            throw new InputDataException(ALTERNATIVES_TAG_ERROR);
        }
        String lowerCategory = assignment.getCategoryInterval().getLowerBound().id();
        String upperCategory = assignment.getCategoryInterval().getUpperBound().id();
        if (!categoriesSet.contains(lowerCategory) || !categoriesSet.contains(upperCategory)) {
            String errorMessage =
                    "There are some categories in assignment list that were not be added to categories list.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        checkCategories(lowerCategory, upperCategory, inputs, errors);
        inputs.setAssignmentType("indirect");
        assignmentMap.put("LOWER", lowerCategory);
        assignmentMap.put("UPPER", upperCategory);
    }

    /**
     * Verifies that the lower bound's rank does not exceed the upper bound's rank.
     *
     * @throws InputDataException when either rank is unknown or the interval is inverted
     */
    protected static void checkCategories(String lower, String upper, Inputs inputs, ProgramExecutionResult errors)
            throws InputDataException {
        Integer lowerRank = inputs.getCategoriesRanking().get(lower);
        Integer upperRank = inputs.getCategoriesRanking().get(upper);
        if (lowerRank == null || upperRank == null || lowerRank > upperRank) {
            String errorMessage =
                    "Each lower category in assignments should have better mark then upper category for the same alternative.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
    }

    /**
     * Handles a direct (single-category) assignment: the category must be known;
     * both interval bounds collapse to the same category.
     */
    protected static void checkAndExtractDirectInAssignment(AlternativeAssignment assignment,
                                                            Set<String> categoriesSet,
                                                            Map<String, String> assignmentMap, Inputs inputs,
                                                            ProgramExecutionResult errors)
            throws InputDataException {
        if (assignment.getCategory().id() == null) {
            errors.addError(ALTERNATIVES_TAG_ERROR);
            throw new InputDataException(ALTERNATIVES_TAG_ERROR);
        }
        String category = assignment.getCategory().id();
        if (!categoriesSet.contains(category)) {
            String errorMessage =
                    "There are some categories in assignment list that were not be added to categories list.";
            errors.addError(errorMessage);
            throw new InputDataException(errorMessage);
        }
        // Don't overwrite an assignment type already set by an indirect assignment.
        if (inputs.getAssignmentType() == null) {
            inputs.setAssignmentType("direct");
        }
        assignmentMap.put("LOWER", category);
        assignmentMap.put("UPPER", category);
    }

    /**
     * Every declared alternative must appear in the assignments map.
     *
     * @throws InputDataException on the first missing alternative
     */
    protected static void checkAlternativesInAssignments(Inputs inputs, ProgramExecutionResult errors)
            throws InputDataException {
        for (String alternativeId : inputs.getAlternativesIds()) {
            if (inputs.assignments.get(alternativeId) == null) {
                String errorMessage = "There are some missing alternatives in assignment list.";
                errors.addError(errorMessage);
                throw new InputDataException(errorMessage);
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.dubbo.registry.consul; import org.apache.dubbo.common.URL; import org.apache.dubbo.common.URLBuilder; import org.apache.dubbo.common.logger.Logger; import org.apache.dubbo.common.logger.LoggerFactory; import org.apache.dubbo.common.utils.CollectionUtils; import org.apache.dubbo.common.utils.NamedThreadFactory; import org.apache.dubbo.common.utils.UrlUtils; import org.apache.dubbo.registry.NotifyListener; import org.apache.dubbo.registry.support.FailbackRegistry; import org.apache.dubbo.rpc.RpcException; import com.ecwid.consul.v1.ConsulClient; import com.ecwid.consul.v1.QueryParams; import com.ecwid.consul.v1.Response; import com.ecwid.consul.v1.agent.model.NewService; import com.ecwid.consul.v1.catalog.CatalogServicesRequest; import com.ecwid.consul.v1.health.HealthServicesRequest; import com.ecwid.consul.v1.health.model.HealthService; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; 
import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static java.util.concurrent.Executors.newCachedThreadPool; import static org.apache.dubbo.common.constants.CommonConstants.ANY_VALUE; import static org.apache.dubbo.common.constants.ConsulConstants.DEFAULT_PORT; import static org.apache.dubbo.common.constants.ConsulConstants.DEFAULT_WATCH_TIMEOUT; import static org.apache.dubbo.common.constants.ConsulConstants.INVALID_PORT; import static org.apache.dubbo.common.constants.ConsulConstants.WATCH_TIMEOUT; import static org.apache.dubbo.common.constants.RegistryConstants.CATEGORY_KEY; import static org.apache.dubbo.common.constants.RegistryConstants.EMPTY_PROTOCOL; import static org.apache.dubbo.registry.Constants.CONSUMER_PROTOCOL; import static org.apache.dubbo.registry.Constants.PROVIDER_PROTOCOL; import static org.apache.dubbo.registry.consul.AbstractConsulRegistry.CHECK_PASS_INTERVAL; import static org.apache.dubbo.registry.consul.AbstractConsulRegistry.DEFAULT_CHECK_PASS_INTERVAL; import static org.apache.dubbo.registry.consul.AbstractConsulRegistry.DEFAULT_DEREGISTER_TIME; import static org.apache.dubbo.registry.consul.AbstractConsulRegistry.DEREGISTER_AFTER; import static org.apache.dubbo.registry.consul.AbstractConsulRegistry.ONE_THOUSAND; import static org.apache.dubbo.registry.consul.AbstractConsulRegistry.PERIOD_DENOMINATOR; import static org.apache.dubbo.registry.consul.AbstractConsulRegistry.SERVICE_TAG; import static org.apache.dubbo.registry.consul.AbstractConsulRegistry.URL_META_KEY; import static org.apache.dubbo.rpc.Constants.TOKEN_KEY; /** * registry center implementation for consul */ public class ConsulRegistry extends FailbackRegistry { private static final Logger logger = LoggerFactory.getLogger(ConsulRegistry.class); private ConsulClient client; private long checkPassInterval; private ExecutorService notifierExecutor = newCachedThreadPool( new 
NamedThreadFactory("dubbo-consul-notifier", true)); private ConcurrentMap<URL, ConsulNotifier> notifiers = new ConcurrentHashMap<>(); private ScheduledExecutorService ttlConsulCheckExecutor; /** * The ACL token */ private String token; private static final int CONSUL_CORE_THREAD_SIZE = 1; private static final int DEFAULT_INDEX = -1; private static final int DEFAULT_WAIT_TIME = -1; public ConsulRegistry(URL url) { super(url); token = url.getParameter(TOKEN_KEY, (String) null); String host = url.getHost(); int port = INVALID_PORT != url.getPort() ? url.getPort() : DEFAULT_PORT; client = new ConsulClient(host, port); checkPassInterval = url.getParameter(CHECK_PASS_INTERVAL, DEFAULT_CHECK_PASS_INTERVAL); ttlConsulCheckExecutor = new ScheduledThreadPoolExecutor(CONSUL_CORE_THREAD_SIZE, new NamedThreadFactory("Ttl-Consul-Check-Executor", true)); ttlConsulCheckExecutor.scheduleAtFixedRate(this::checkPass, checkPassInterval / PERIOD_DENOMINATOR, checkPassInterval / PERIOD_DENOMINATOR, TimeUnit.MILLISECONDS); } @Override public void register(URL url) { if (isConsumerSide(url)) { return; } super.register(url); } @Override public void doRegister(URL url) { if (token == null) { client.agentServiceRegister(buildService(url)); } else { client.agentServiceRegister(buildService(url), token); } } @Override public void unregister(URL url) { if (isConsumerSide(url)) { return; } super.unregister(url); } @Override public void doUnregister(URL url) { if (token == null) { client.agentServiceDeregister(buildId(url)); } else { client.agentServiceDeregister(buildId(url), token); } } @Override public void subscribe(URL url, NotifyListener listener) { if (isProviderSide(url)) { return; } super.subscribe(url, listener); } @Override public void doSubscribe(URL url, NotifyListener listener) { Long index; List<URL> urls; if (ANY_VALUE.equals(url.getServiceInterface())) { Response<Map<String, List<String>>> response = getAllServices(DEFAULT_INDEX, buildWatchTimeout(url)); index = 
response.getConsulIndex(); List<HealthService> services = getHealthServices(response.getValue()); urls = convert(services, url); } else { String service = url.getServiceInterface(); Response<List<HealthService>> response = getHealthServices(service, DEFAULT_INDEX, buildWatchTimeout(url)); index = response.getConsulIndex(); urls = convert(response.getValue(), url); } notify(url, listener, urls); ConsulNotifier notifier = notifiers.computeIfAbsent(url, k -> new ConsulNotifier(url, index)); notifierExecutor.submit(notifier); } @Override public void unsubscribe(URL url, NotifyListener listener) { if (isProviderSide(url)) { return; } super.unsubscribe(url, listener); } @Override public void doUnsubscribe(URL url, NotifyListener listener) { ConsulNotifier notifier = notifiers.remove(url); notifier.stop(); } @Override public List<URL> lookup(URL url) { if (url == null) { throw new IllegalArgumentException("lookup url == null"); } try { String service = url.getServiceKey(); Response<List<HealthService>> result = getHealthServices(service, DEFAULT_INDEX, buildWatchTimeout(url)); if (result == null || result.getValue() == null || result.getValue().isEmpty()) { return new ArrayList<>(); } else { return convert(result.getValue(), url); } } catch (Throwable e) { throw new RpcException("Failed to lookup " + url + " from consul " + getUrl() + ", cause: " + e.getMessage(), e); } } @Override public boolean isAvailable() { return client.getAgentSelf() != null; } @Override public void destroy() { super.destroy(); notifierExecutor.shutdown(); ttlConsulCheckExecutor.shutdown(); } private void checkPass() { for (URL url : getRegistered()) { String checkId = buildId(url); try { if (token == null) { client.agentCheckPass("service:" + checkId); } else { client.agentCheckPass("service:" + checkId, null, token); } if (logger.isDebugEnabled()) { logger.debug("check pass for url: " + url + " with check id: " + checkId); } } catch (Throwable t) { logger.warn("fail to check pass for url: " + url 
+ ", check id is: " + checkId, t); } } } private Response<List<HealthService>> getHealthServices(String service, long index, int watchTimeout) { HealthServicesRequest request = HealthServicesRequest.newBuilder() .setTag(SERVICE_TAG) .setQueryParams(new QueryParams(watchTimeout, index)) .setPassing(true) .setToken(token) .build(); return client.getHealthServices(service, request); } private Response<Map<String, List<String>>> getAllServices(long index, int watchTimeout) { CatalogServicesRequest request = CatalogServicesRequest.newBuilder() .setQueryParams(new QueryParams(watchTimeout, index)) .setToken(token) .build(); return client.getCatalogServices(request); } private List<HealthService> getHealthServices(Map<String, List<String>> services) { return services.entrySet().stream() .filter(s -> s.getValue().contains(SERVICE_TAG)) .map(s -> getHealthServices(s.getKey(), DEFAULT_INDEX, DEFAULT_WAIT_TIME).getValue()) .flatMap(Collection::stream) .collect(Collectors.toList()); } private boolean isConsumerSide(URL url) { return url.getProtocol().equals(CONSUMER_PROTOCOL); } private boolean isProviderSide(URL url) { return url.getProtocol().equals(PROVIDER_PROTOCOL); } private List<URL> convert(List<HealthService> services, URL consumerURL) { if (CollectionUtils.isEmpty(services)) { return emptyURL(consumerURL); } return services.stream() .map(HealthService::getService) .filter(Objects::nonNull) .map(HealthService.Service::getMeta) .filter(m -> m != null && m.containsKey(URL_META_KEY)) .map(m -> m.get(URL_META_KEY)) .map(URL::valueOf) .filter(url -> UrlUtils.isMatch(consumerURL, url)) .collect(Collectors.toList()); } private List<URL> emptyURL(URL consumerURL) { // No Category Parameter URL empty = URLBuilder.from(consumerURL) .setProtocol(EMPTY_PROTOCOL) .removeParameter(CATEGORY_KEY) .build(); List<URL> result = new ArrayList<URL>(); result.add(empty); return result; } private NewService buildService(URL url) { NewService service = new NewService(); 
service.setAddress(url.getHost()); service.setPort(url.getPort()); service.setId(buildId(url)); service.setName(url.getServiceInterface()); service.setCheck(buildCheck(url)); service.setTags(buildTags(url)); service.setMeta(Collections.singletonMap(URL_META_KEY, url.toFullString())); return service; } private List<String> buildTags(URL url) { Map<String, String> params = url.getParameters(); List<String> tags = params.entrySet().stream() .map(k -> k.getKey() + "=" + k.getValue()) .collect(Collectors.toList()); tags.add(SERVICE_TAG); return tags; } private String buildId(URL url) { // let's simply use url's hashcode to generate unique service id for now return Integer.toHexString(url.hashCode()); } private NewService.Check buildCheck(URL url) { NewService.Check check = new NewService.Check(); check.setTtl((checkPassInterval / ONE_THOUSAND) + "s"); check.setDeregisterCriticalServiceAfter(url.getParameter(DEREGISTER_AFTER, DEFAULT_DEREGISTER_TIME)); return check; } private int buildWatchTimeout(URL url) { return url.getParameter(WATCH_TIMEOUT, DEFAULT_WATCH_TIMEOUT) / ONE_THOUSAND; } private class ConsulNotifier implements Runnable { private URL url; private long consulIndex; private boolean running; ConsulNotifier(URL url, long consulIndex) { this.url = url; this.consulIndex = consulIndex; this.running = true; } @Override public void run() { while (this.running) { if (ANY_VALUE.equals(url.getServiceInterface())) { processServices(); } else { processService(); } } } private void processService() { String service = url.getServiceKey(); Response<List<HealthService>> response = getHealthServices(service, consulIndex, buildWatchTimeout(url)); Long currentIndex = response.getConsulIndex(); if (currentIndex != null && currentIndex > consulIndex) { consulIndex = currentIndex; List<HealthService> services = response.getValue(); List<URL> urls = convert(services, url); for (NotifyListener listener : getSubscribed().get(url)) { doNotify(url, listener, urls); } } } private void 
processServices() { Response<Map<String, List<String>>> response = getAllServices(consulIndex, buildWatchTimeout(url)); Long currentIndex = response.getConsulIndex(); if (currentIndex != null && currentIndex > consulIndex) { consulIndex = currentIndex; List<HealthService> services = getHealthServices(response.getValue()); List<URL> urls = convert(services, url); for (NotifyListener listener : getSubscribed().get(url)) { doNotify(url, listener, urls); } } } void stop() { this.running = false; } } }
/*
   Copyright 2014 Immutables Authors and Contributors

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */
package org.immutables.generator.processor;

import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import java.util.Collection;
import java.util.List;
import javax.lang.model.element.TypeElement;
import org.immutables.generator.Intrinsics;
import org.immutables.generator.Templates;
import org.immutables.generator.processor.ImmutableTrees.ApplyExpression;
import org.immutables.generator.processor.ImmutableTrees.AssignGenerator;
import org.immutables.generator.processor.ImmutableTrees.Block;
import org.immutables.generator.processor.ImmutableTrees.BoundAccessExpression;
import org.immutables.generator.processor.ImmutableTrees.Comment;
import org.immutables.generator.processor.ImmutableTrees.ConditionalBlock;
import org.immutables.generator.processor.ImmutableTrees.ForStatement;
import org.immutables.generator.processor.ImmutableTrees.Identifier;
import org.immutables.generator.processor.ImmutableTrees.IfStatement;
import org.immutables.generator.processor.ImmutableTrees.InvokableDeclaration;
import org.immutables.generator.processor.ImmutableTrees.InvokeStatement;
import org.immutables.generator.processor.ImmutableTrees.InvokeString;
import org.immutables.generator.processor.ImmutableTrees.IterationGenerator;
import org.immutables.generator.processor.ImmutableTrees.LetStatement;
import org.immutables.generator.processor.ImmutableTrees.ResolvedType;
import org.immutables.generator.processor.ImmutableTrees.StringLiteral;
import org.immutables.generator.processor.ImmutableTrees.Template;
import org.immutables.generator.processor.ImmutableTrees.TextLine;
import org.immutables.generator.processor.ImmutableTrees.TransformGenerator;
import org.immutables.generator.processor.ImmutableTrees.Unit;
import org.immutables.generator.processor.ImmutableTrees.ValueDeclaration;
import org.immutables.generator.processor.Trees.Expression;
import static org.immutables.generator.StringLiterals.*;

/**
 * Writes a parsed template {@link Unit} out as the Java source code of a generated class.
 * This part is written with the simplest possible writer in mind; it was deliberately decided
 * not to use external dependencies. It is possible that in the future it will be replaced with
 * self-bootstrapping, i.e. the template generator will be generated by the same framework which
 * generates templates.
 *
 * <p>The visitor methods inherited from {@code TreesTransformer} each emit a Java code fragment
 * for the corresponding tree node into the mutable {@link Context}; the tree itself is returned
 * unmodified. The emitted code relies on static helpers from {@link Intrinsics}
 * ({@code $}, {@code $if}, {@code $cast}, {@code $in}, {@code $collect}).
 */
public final class TemplateWriter extends TreesTransformer<TemplateWriter.Context> {
  // Element of the user-written abstract template class we extend in generated code.
  private final TypeElement sourceElement;
  // Simple name of the generated class.
  private final String simpleName;
  // Grab-bag of processing-environment utilities (elements, types, ...).
  private final SwissArmyKnife knife;

  public TemplateWriter(SwissArmyKnife knife, TypeElement sourceElement, String simpleName) {
    this.knife = knife;
    this.sourceElement = sourceElement;
    this.simpleName = simpleName;
  }

  /** Renders the whole compilation unit and returns the accumulated generated source. */
  public CharSequence toCharSequence(Unit unit) {
    Context context = new Context();
    transform(context, unit);
    return context.builder;
  }

  /**
   * Emits the file prologue (package, static import of intrinsics, class declaration extending
   * the source template class), then all templates, then the fragment-dispatch helper class.
   */
  @Override
  public Unit transform(Context context, Unit value) {
    context.out("package ", knife.elements.getPackageOf(sourceElement).getQualifiedName(), ";")
        .ln().ln()
        .out("import static ", Intrinsics.class, ".*;")
        .ln().ln();

    context
        .out("@", SuppressWarnings.class, "(", toLiteral("all"), ")")
        .ln()
        .out("public class ", simpleName, " extends ", sourceElement.getQualifiedName())
        .out(" ").openBrace();

    // Suspend pending-brace accounting so nested emitters balance their own braces.
    int braces = context.getAndSetPendingBraces(0);
    Unit unit = super.transform(context, value);
    writeTemplateDispatch(context);
    context.getAndSetPendingBraces(braces);
    context.ln().closeBraces().ln();
    return unit;
  }

  /**
   * Emits the {@code FragmentDispatch} inner class: a single Fragment subclass whose
   * {@code run} switches on an integer index to the per-template methods registered in
   * {@link Context#templateIndex} (one {@code _tN__name} method per template).
   */
  private void writeTemplateDispatch(Context context) {
    int initialBraces = context.getAndSetPendingBraces(0);
    context.out("private class FragmentDispatch extends ", Templates.Fragment.class, "")
        .openBrace().ln()
        .out("private final int index;").ln()
        .out("FragmentDispatch(int arity, int index)")
        .openBrace().ln()
        .out("super(arity);").ln()
        .out("this.index = index;").ln()
        .closeBrace().ln()
        .out("@Override public void run(", Templates.Invokation.class, " invokation)")
        .openBrace().indent().ln()
        .out("switch (index)").openBrace().ln();

    for (int i = 0; i < context.templateIndex.size(); i++) {
      String templateName = context.templateIndex.get(i);
      context.out("case ", i, ": _t", i, "__", templateName, "(invokation); break;").ln();
    }
    context.out("default: break;");
    context.outdent().ln()
        .closeBraces().ln()
        .getAndSetPendingBraces(initialBraces);
  }

  /**
   * Emits one template: a public accessor method returning the invokable, plus a private
   * dispatched field/method pair produced by {@link DispatchedTemplateLike}.
   */
  @Override
  public Template transform(final Context context, final Template template) {
    String name = template.declaration().name().value();

    context.ln()
        .out(template.isPublic() ? "public " : "")
        .out(Templates.Invokable.class)
        .out(" ")
        .out(name)
        .out("() { return ")
        .out(name)
        .out("; }").ln();

    context.out("private ");

    new DispatchedTemplateLike() {
      {
        // Instance-initializer configuration of the anonymous generator.
        declaration = template.declaration();
        variable = true;
      }

      @Override
      void body() {
        transformTemplateDeclaration(context, template, template.declaration());
        transformTemplateListParts(context, template, template.parts());
      }
    }.generate(context);

    context.out(";").ln();
    return template;
  }

  /**
   * Emits an invokable backed by the shared {@code FragmentDispatch} class: a
   * {@code new FragmentDispatch(arity, index)} expression plus a {@code _tN__name(Invokation __)}
   * method containing the body produced by {@link #body()}.
   */
  abstract class DispatchedTemplateLike {
    // When true, emit "final Invokable <name> = " before the dispatch expression.
    boolean variable;
    Trees.InvokableDeclaration declaration;

    final void generate(Context context) {
      if (variable) {
        context.out("final ")
            .out(Templates.Invokable.class)
            .out(" ")
            .out(declaration.name().value())
            .out(" = ");
      }
      String templateName = declaration.name().value();
      int templateIndex = context.indexTemplate(templateName);
      context.out("new FragmentDispatch(", declaration.parameters().size(), ", ", templateIndex, ");").ln();
      context.out("void _t", templateIndex, "__", templateName, "(")
          .out(Templates.Invokation.class)
          .out(" __) ")
          .openBrace()
          .indent()
          .ln();

      int braces = context.getAndSetPendingBraces(0);
      context.delimit();
      body();
      context.delimit();
      context.getAndSetPendingBraces(braces);
      context.outdent().ln().closeBraces();
    }

    /** Emits the statements inside the generated method body. */
    abstract void body();
  }

  /**
   * Emits an invokable as an inline anonymous {@code Templates.Fragment} subclass with an
   * overridden {@code run(Invokation __)} — used where a standalone expression is needed
   * (let bindings, inline invocation bodies).
   */
  abstract class TemplateLike {
    // When true, emit "final Invokable <name> = " before the fragment expression.
    boolean variable;
    Trees.InvokableDeclaration declaration;

    final void generate(Context context) {
      if (variable) {
        context.out("final ")
            .out(Templates.Invokable.class)
            .out(" ")
            .out(declaration.name().value())
            .out(" = ");
      }
      context.out("new ").out(Templates.Fragment.class)
          .out("(", declaration.parameters().size(), ") ")
          .openBrace()
          .ln()
          .out("@Override public void run(").out(Templates.Invokation.class).out(" __) ")
          .openBrace()
          .indent()
          .ln();

      int braces = context.getAndSetPendingBraces(0);
      context.delimit();
      body();
      context.delimit();
      context.getAndSetPendingBraces(braces);
      context.outdent().ln().closeBraces();
    }

    /** Emits the statements inside the generated {@code run} body. */
    abstract void body();
  }

  /**
   * Emits a let-binding as a fragment assigned to a final local; inside the fragment the
   * binding name is aliased to {@code this} so the let body can invoke itself recursively.
   */
  @Override
  public LetStatement transform(final Context context, final LetStatement statement) {
    new TemplateLike() {
      {
        declaration = statement.declaration();
        variable = true;
      }

      @Override
      void body() {
        context.out("final ")
            .out(Templates.Invokable.class)
            .out(" ")
            .out(statement.declaration().name().value())
            .out(" = this;")
            .ln();

        transformLetStatementDeclaration(context, statement, statement.declaration());
        transformLetStatementListParts(context, statement, statement.parts());
      }
    }.generate(context);

    context.out(";").delimit();
    return statement;
  }

  /**
   * Emits a for-loop. When the template reads the implicit iteration variable
   * ({@code for.index}, {@code for.first}), a {@code Templates.Iteration} local is created and
   * its {@code index}/{@code first} fields are updated at the bottom of each loop pass.
   */
  @Override
  public ForStatement transform(Context context, ForStatement statement) {
    context.openBrace();
    if (statement.useForAccess()) {
      context.infor()
          .out("final ")
          .out(Templates.Iteration.class)
          .out(" ")
          .out(context.accessMapper(TypeResolver.ITERATION_ACCESS_VARIABLE))
          .out(" = new ")
          .out(Templates.Iteration.class)
          .out("();")
          .ln();
    }
    transformForStatementListDeclaration(context, statement, statement.declaration());
    int braces = context.getAndSetPendingBraces(0);
    context.indent();
    if (statement.useDelimit()) {
      context.delimit();
    }
    transformForStatementListParts(context, statement, statement.parts());
    if (statement.useDelimit()) {
      context.delimit();
    }
    if (statement.useForAccess()) {
      context.out(context.accessMapper(TypeResolver.ITERATION_ACCESS_VARIABLE)).out(".index++;").ln();
      context.out(context.accessMapper(TypeResolver.ITERATION_ACCESS_VARIABLE)).out(".first = false;");
      context.outfor();
    }
    context.getAndSetPendingBraces(braces);
    context.outdent().ln()
        .closeBraces().ln();
    if (statement.useDelimit()) {
      context.delimit();
    }
    return statement;
  }

  /** Emits a direct string invocation: {@code $(__, "literal");}. */
  @Override
  public InvokeString transform(Context context, InvokeString value) {
    context.out("$(__, ", value.literal(), ");").ln();
    return value;
  }

  /**
   * Emits an invocation statement {@code $(__, access, params...[, body]);} where the optional
   * trailing argument is an inline fragment holding the invocation's nested parts.
   */
  @Override
  public InvokeStatement transform(final Context context, final InvokeStatement statement) {
    context.out("$(__, ");
    transformInvokeStatementAccess(context, statement, statement.access());
    transformInvokeStatementListParams(context, statement, statement.params());
    if (!statement.parts().isEmpty()) {
      context.out(", ");
      new TemplateLike() {
        {
          // Anonymous (empty-named) fragment carrying the invocation body.
          declaration = InvokableDeclaration.builder()
              .name(Identifier.of(""))
              .build();
        }

        @Override
        void body() {
          transformInvokeStatementListParts(context, statement, statement.parts());
        }
      }.generate(context);
    }
    context.out(");").ln();
    return statement;
  }

  /** Emits each invocation parameter preceded by a comma separator. */
  @Override
  protected Iterable<Expression> transformInvokeStatementListParams(
      Context context,
      InvokeStatement value,
      List<Expression> collection) {
    for (Trees.Expression element : collection) {
      context.out(", ");
      transformInvokeStatementParams(context, value, element);
    }
    return collection;
  }

  /** Emits an assignment generator: {@code <decl> = $cast(<from>);}. */
  @Override
  public AssignGenerator transform(Context context, AssignGenerator generator) {
    transformAssignGeneratorDeclaration(context, generator, generator.declaration());
    // context.out(" = (")
    // .out(requiredResolvedTypeOfDeclaration(generator.declaration()))
    // .out(") $(");
    context.out(" = $cast(");
    transformAssignGeneratorFrom(context, generator, generator.from());
    context.out(");").ln();
    return generator;
  }

  /**
   * Emits a transform generator: declares a {@code Collection} accumulator via
   * {@code $collect()}, then a for-loop over {@code $in(from)} with an optional
   * {@code if ($if(condition))} guard, adding each transformed element.
   */
  @Override
  public TransformGenerator transform(Context context, TransformGenerator generator) {
    context
        .out(Collection.class)
        .out("<")
        .out(generator.declaration().containedType().get())
        .out("> ")
        .out(generator.declaration().name().value())
        .out(" = ")
        .out(Intrinsics.class)
        .out(".$collect();")
        .ln();

    int braces = context.getAndSetPendingBraces(0);

    context.out("for (");
    transformTransformGeneratorVarDeclaration(context, generator, generator.varDeclaration());
    context.out(" : $in(");
    transformTransformGeneratorFrom(context, generator, generator.from());
    context.out(")) ").openBrace().indent().ln();

    if (generator.condition().isPresent()) {
      context.out("if ($if(");
      transformTransformGeneratorOptionalCondition(context, generator, generator.condition());
      context.out(")) ").openBrace().ln();
    }

    context.out(generator.declaration().name().value()).out(".add(");
    transformTransformGeneratorTransform(context, generator, generator.transform());
    context.out(");");

    context.outdent().ln().closeBraces();
    context.getAndSetPendingBraces(braces);
    return generator;
  }

  /**
   * Emits the opening of an iteration generator loop (closed elsewhere via pending braces):
   * {@code for (<decl> : $in(<from>)) {} with optional {@code if ($if(<condition>)) {} guard.
   */
  @Override
  public IterationGenerator transform(Context context, IterationGenerator generator) {
    context.out("for (");
    transformIterationGeneratorDeclaration(context, generator, generator.declaration());
    context.out(" : $in(");
    transformIterationGeneratorFrom(context, generator, generator.from());
    context.out(")) ").openBrace().ln();

    if (generator.condition().isPresent()) {
      context.out("if ($if(");
      transformIterationGeneratorOptionalCondition(context, generator, generator.condition());
      context.out(")) ").openBrace().ln();
    }
    return generator;
  }

  /** Emits a value declaration as {@code final <resolvedType> <name>}. */
  @Override
  public ValueDeclaration transform(Context context, ValueDeclaration value) {
    context.out("final ").out(requiredResolvedTypeOfDeclaration(value)).out(" ").out(value.name().value());
    return value;
  }

  // Unwraps the resolved (type-checked) Java type of a declaration; the type is required to
  // be present at this stage — .get() will throw if type resolution did not run.
  private Object requiredResolvedTypeOfDeclaration(Trees.ValueDeclaration value) {
    return ((ResolvedType) value.type().get()).type();
  }

  /**
   * Emits a literal text line as {@code __.out(fragment)[.ln()];}, or just {@code __.ln();}
   * for an empty line, or nothing for an empty fragment without a newline.
   */
  @Override
  public TextLine transform(Context context, TextLine line) {
    if (line.fragment().value().isEmpty()) {
      if (line.newline()) {
        context.out("__.ln();").ln();
      }
    } else {
      context.out("__.out(")
          .out(line.fragment())
          .out(line.newline() ? ").ln();" : ");").ln();
    }
    return line;
  }

  /** Emits a string literal verbatim (already quoted by the tree node's toString). */
  @Override
  public StringLiteral transform(Context context, StringLiteral value) {
    context.out(value);
    return value;
  }

  /**
   * Emits a chained accessor expression like {@code a.b().c}, mapping the leading identifier
   * through {@link Context#accessMapper} and appending {@code ()} for callable accessors.
   */
  @Override
  public BoundAccessExpression transform(Context context, BoundAccessExpression value) {
    ImmutableList<Accessors.BoundAccess> accessList = TypeResolver.asBoundAccess(value.accessor());

    StringBuilder expressionBuilder = new StringBuilder();

    for (int i = 0; i < accessList.size(); i++) {
      boolean first = i == 0;
      // NOTE(review): 'last' is true for every element EXCEPT the final one (i != size - 1);
      // the name reads inverted relative to the condition — confirm intent before relying on it.
      boolean last = i != accessList.size() - 1;
      Accessors.BoundAccess access = accessList.get(i);

      if (!first) {
        expressionBuilder.append(".");
      }

      String name = access.name;
      if (first) {
        name = context.accessMapper(name);
      }
      expressionBuilder.append(name).append(access.callable ? "()" : "");

      if (access.boxed && last) {
        // Wrap the whole expression so far in $(...) to box/convert the accessed value.
        expressionBuilder.insert(0, "$(");
        expressionBuilder.append(")");
      }
    }

    context.out(expressionBuilder);
    return value;
  }

  /** Emits an apply expression wrapped as {@code $(param, param, ...)}. */
  @Override
  public ApplyExpression transform(Context context, ApplyExpression value) {
    context.out("$(");
    ApplyExpression expression = super.transform(context, value);
    context.out(")");
    return expression;
  }

  /** Emits apply-expression parameters as a comma-separated list (no leading comma). */
  @Override
  protected Iterable<Expression> transformApplyExpressionListParams(
      Context context,
      ApplyExpression value,
      List<Expression> collection) {
    boolean first = true;
    for (Trees.Expression element : collection) {
      if (!first) {
        context.out(", ");
      }
      first = false;
      transformApplyExpressionParams(context, value, element);
    }
    return collection;
  }

  // Emits "if ($if(<condition>)) {" plus the branch body; the closing brace is emitted by
  // the IfStatement transform below.
  private void writeConditionPart(Context context, ConditionalBlock block) {
    context.out("if ($if(");
    transformConditionalBlockCondition(context, block, block.condition());
    context.out(")) {")
        .indent()
        .ln();

    context.delimit();
    transformConditionalBlockListParts(context, block, block.parts());
  }

  /** Emits an if / else-if / else chain, delimiting output around each branch. */
  @Override
  public IfStatement transform(Context context, IfStatement statement) {
    context.delimit().ln();
    writeConditionPart(context, (ConditionalBlock) statement.then());
    for (Trees.ConditionalBlock block : statement.otherwiseIf()) {
      context.outdent().out("} else ");
      writeConditionPart(context, (ConditionalBlock) block);
    }
    if (statement.otherwise().isPresent()) {
      context.outdent()
          .ln()
          .out("} else {")
          .indent()
          .ln()
          .delimit();
      transform(context, (Block) statement.otherwise().get());
    }
    context.outdent()
        .ln()
        .out("}")
        .ln()
        .delimit();
    return statement;
  }

  /** Template comments produce no output, but still act as a delimiter. */
  @Override
  public Comment transform(Context context, Comment value) {
    context.delimit();
    return value;
  }

  /**
   * Emits parameter extraction for an invokable: one {@code final <type> <name> = ...;} per
   * declared parameter, pulling values from the invocation by positional index and converting
   * according to the declared type (String/Boolean/Object special-cased, else {@code $cast}).
   */
  @Override
  public InvokableDeclaration transform(Context context, InvokableDeclaration value) {
    int count = 0;
    for (Trees.Parameter parameter : value.parameters()) {
      int paramIndex = count++;
      String typeName = parameter.type().toString();
      context.out("final ", typeName, " ", parameter.name().value()).out(" = ");
      if (typeName.equals(String.class.getName())) {
        context.out("__.param(", paramIndex, ").toString();").ln();
      } else if (typeName.equals(Boolean.class.getName())) {
        context.out("$if(__.param(", paramIndex, "));").ln();
      } else if (typeName.equals(Object.class.getName())) {
        context.out("__.param(", paramIndex, ");").ln();
      } else {
        context.out("$cast(__.param(", paramIndex, "));").ln();
      }
    }
    return super.transform(context, value);
  }

  /**
   * Mutable emission state: the output buffer, indentation level, the count of braces whose
   * closing is deferred ({@code bracesToClose}), for-loop nesting depth, and the registry of
   * template names used to build the {@code FragmentDispatch} switch.
   */
  static class Context {
    // Template names in dispatch order; list position is the dispatch index.
    final List<String> templateIndex = Lists.newArrayListWithExpectedSize(100);
    // Accumulated generated source text.
    final StringBuilder builder = new StringBuilder();
    private int indentLevel;
    private int bracesToClose;
    private int forLevels;

    // Enter a for-loop scope (affects accessMapper's iteration variable name).
    Context infor() {
      forLevels++;
      return this;
    }

    Context delimit() {
      // Avoid delimits on a top level when there's not surrounding template
      if (indentLevel > 0) {
        out("__.dl();");
      }
      return this;
    }

    // Leave a for-loop scope.
    Context outfor() {
      forLevels--;
      return this;
    }

    Context indent() {
      indentLevel++;
      return this;
    }

    Context outdent() {
      indentLevel--;
      return this;
    }

    Context out(Object... objects) {
      for (Object object : objects) {
        out(object);
      }
      return this;
    }

    // Registers a template name and returns its dispatch index.
    int indexTemplate(String template) {
      int index = templateIndex.size();
      templateIndex.add(template);
      return index;
    }

    // Maps the implicit iteration-access identifier to a depth-unique local name ("_itN");
    // all other identifiers pass through unchanged.
    public String accessMapper(String identifer) {
      if (TypeResolver.ITERATION_ACCESS_VARIABLE.equals(identifer)) {
        return "_it" + forLevels;
      }
      return identifer;
    }

    // Swaps the deferred-close-brace counter, returning the previous value so callers can
    // save/restore it around nested emission.
    int getAndSetPendingBraces(int bracesToClose) {
      int value = this.bracesToClose;
      this.bracesToClose = bracesToClose;
      return value;
    }

    // Emits all deferred closing braces and resets the counter.
    Context closeBraces() {
      for (int i = 0; i < bracesToClose; i++) {
        builder.append('}');
      }
      bracesToClose = 0;
      return this;
    }

    Context openBrace() {
      builder.append('{');
      bracesToClose++;
      return this;
    }

    Context closeBrace() {
      builder.append('}');
      bracesToClose--;
      return this;
    }

    // Appends one value: unwraps Optional (null when absent), renders Class as its canonical
    // name, appends CharSequence directly, everything else via String.valueOf.
    Context out(Object object) {
      if (object instanceof Optional<?>) {
        object = ((Optional<?>) object).orNull();
      }
      if (object instanceof Class<?>) {
        object = ((Class<?>) object).getCanonicalName();
      }
      if (object instanceof CharSequence) {
        builder.append((CharSequence) object);
        return this;
      }
      builder.append(String.valueOf(object));
      return this;
    }

    // Newline followed by the current indentation.
    Context ln() {
      builder.append('\n');
      for (int i = 0; i < indentLevel; i++) {
        builder.append(" ");
      }
      return this;
    }
  }
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.Depset;
import com.google.devtools.build.lib.collect.nestedset.NestedSet.NestedSetDepthException;
import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec;
import com.google.devtools.build.lib.syntax.EvalException;
import com.google.devtools.build.lib.syntax.EvalUtils;
import com.google.devtools.build.lib.syntax.Printer;
import com.google.devtools.build.lib.syntax.Sequence;
import com.google.devtools.build.lib.syntax.Starlark;
import com.google.devtools.build.lib.util.LoggingUtil;
import com.google.devtools.build.lib.util.StringCanonicalizer;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import javax.annotation.Nullable;

/**
 * Root of Type symbol hierarchy for values in the build language.
 *
 * <p>Type symbols are primarily used for their <code>convert</code> method, which is a kind of cast
 * operator enabling conversion from untyped (Object) references to values in the build language, to
 * typed references.
 *
 * <p>For example, this code type-converts a value <code>x</code> returned by the evaluator, to a
 * list of strings:
 *
 * <pre>
 * Object x = expr.eval(env);
 * List&lt;String&gt; s = Type.STRING_LIST.convert(x);
 * </pre>
 *
 * <p><b>BEFORE YOU ADD A NEW TYPE:</b>
 *
 * <p>We frequently get requests to create a new kind of attribute type whenever a use case doesn't
 * seem to fit into one of the existing types. This is almost always a bad idea. The most complex
 * type we currently have is probably STRING_LIST_DICT or maybe LABEL_KEYED_STRING_DICT. But no
 * matter what you support, someone will always want to add another layer of structure. It's even
 * been suggested to allow JSON or arbitrary Starlark values in attributes.
 *
 * <p>Adding a new type has implications for many different systems. The whole of the loading phase
 * needs to know about the type -- how to serialize it, how to format it for `bazel query`, how to
 * traverse label dependencies embedded within it. Then you need to think about how to represent
 * attribute values of that type in Starlark within a rule implementation function, and come up with
 * a good name for that type in the Starlark `attr` module. All of the tooling for formatting,
 * linting, and analyzing BUILD files may need to be updated.
 *
 * <p>It's usually possible to accomplish the end goal without making the target attribute grammar
 * more expressive. If it's not, that may be a sign that attributes are not the right mechanism to
 * use, and perhaps instead you should use opaque string identifiers, or labels to sub-targets with
 * more structure (think toolchains, platforms, config_setting).
 *
 * <p>Any new attribute type should be general-purpose and meet a high bar of usefulness (unlikely
 * since we seem to be doing fine so far without it), and not overly complicate BUILD files or rule
 * implementation functions.
 */
public abstract class Type<T> {

  protected Type() {}

  /**
   * Converts untyped Object x resulting from the evaluation of an expression in the build language,
   * into a typed object of type T.
   *
   * <p>x must be *directly* convertible to this type. This therefore disqualifies "selector
   * expressions" of the form "{ config1: 'value1_of_orig_type', config2: 'value2_of_orig_type; }"
   * (which support configurable attributes). To handle those expressions, see
   * {@link com.google.devtools.build.lib.packages.BuildType#selectableConvert}.
   *
   * @param x the build-interpreter value to convert.
   * @param what an object having a toString describing what x is for; should be included in
   *     any exception thrown. Grammatically, must produce a string describing a syntactic
   *     construct, e.g. "attribute 'srcs' of rule foo".
   * @param context the label of the current BUILD rule; must be non-null if resolution of
   *     package-relative label strings is required
   * @throws ConversionException if there was a problem performing the type conversion
   */
  public abstract T convert(Object x, Object what, @Nullable Object context)
      throws ConversionException;
  // TODO(bazel-team): Check external calls (e.g. in PackageFactory), verify they always want
  // this over selectableConvert.

  /**
   * Equivalent to {@link #convert(Object, Object, Object)} where the label is {@code null}.
   * Useful for converting values to types that do not involve the type {@code LABEL}
   * and hence do not require the label of the current package.
   */
  public final T convert(Object x, Object what) throws ConversionException {
    return convert(x, what, null);
  }

  /**
   * Like {@link #convert(Object, Object, Object)}, but converts Starlark {@code None} to given
   * {@code defaultValue}.
   */
  @Nullable
  public final T convertOptional(Object x, String what, @Nullable Object context, T defaultValue)
      throws ConversionException {
    if (EvalUtils.isNullOrNone(x)) {
      return defaultValue;
    }
    return convert(x, what, context);
  }

  /**
   * Like {@link #convert(Object, Object, Object)}, but converts Starlark {@code None} to java
   * {@code null}.
   */
  @Nullable
  public final T convertOptional(Object x, String what, @Nullable Object context)
      throws ConversionException {
    return convertOptional(x, what, context, null);
  }

  /**
   * Like {@link #convert(Object, Object)}, but converts Starlark {@code NONE} to java {@code null}.
   */
  @Nullable
  public final T convertOptional(Object x, String what) throws ConversionException {
    return convertOptional(x, what, null);
  }

  public abstract T cast(Object value);

  @Override
  public abstract String toString();

  /**
   * Returns the default value for this type; may return null iff no default is defined for this
   * type.
   */
  public abstract T getDefaultValue();

  /**
   * Function accepting a (potentially null) {@link Label} and an arbitrary context object. Used by
   * {@link #visitLabels}.
   */
  public interface LabelVisitor<C> {
    void visit(@Nullable Label label, @Nullable C context);
  }

  /**
   * Invokes {@code visitor.visit(label, context)} for each {@link Label} {@code label} associated
   * with {@code value}, which is assumed an instance of this {@link Type}.
   *
   * <p>This is used to support reliable label visitation in {@link
   * com.google.devtools.build.lib.packages.AbstractAttributeMapper#visitLabels}. To preserve that
   * reliability, every type should faithfully define its own instance of this method. In other
   * words, be careful about defining default instances in base types that get auto-inherited by
   * their children. Keep all definitions as explicit as possible.
   */
  public abstract <C> void visitLabels(LabelVisitor<C> visitor, Object value, @Nullable C context);

  /** Classifications of labels by their usage. */
  public enum LabelClass {
    /** Used for types which are not labels. */
    NONE,
    /** Used for types which use labels to declare a dependency. */
    DEPENDENCY,
    /**
     * Used for types which use labels to reference another target but do not declare a dependency,
     * in cases where doing so would cause a dependency cycle.
     */
    NONDEP_REFERENCE,
    /** Used for types which use labels to declare an output path. */
    OUTPUT,
    /**
     * Used for types which contain Fileset entries, which contain labels but do not produce
     * normal dependencies.
     */
    FILESET_ENTRY
  }

  /** Returns the class of labels contained by this type, if any. */
  public LabelClass getLabelClass() {
    return LabelClass.NONE;
  }

  /**
   * Implementation of concatenation for this type (e.g. "val1 + val2"). Returns null to
   * indicate concatenation isn't supported.
   */
  public T concat(@SuppressWarnings("unused") Iterable<T> elements) {
    return null;
  }

  /**
   * Converts an initialized Type object into a tag set representation.
   * This operation is only valid for certain sub-Types which are guaranteed
   * to be properly initialized.
   *
   * @param value the actual value
   * @throws UnsupportedOperationException if the concrete type does not support
   *     tag conversion or if a convertible type has no initialized value.
   */
  public Set<String> toTagSet(Object value, String name) {
    String msg = "Attribute " + name + " does not support tag conversion.";
    throw new UnsupportedOperationException(msg);
  }

  /** The type of an integer. */
  @AutoCodec public static final Type<Integer> INTEGER = new IntegerType();

  /** The type of a string. */
  @AutoCodec public static final Type<String> STRING = new StringType();

  /** The type of a boolean. */
  @AutoCodec public static final Type<Boolean> BOOLEAN = new BooleanType();

  /** The type of a list of not-yet-typed objects. */
  @AutoCodec public static final ObjectListType OBJECT_LIST = new ObjectListType();

  /** The type of a list of {@linkplain #STRING strings}. */
  @AutoCodec public static final ListType<String> STRING_LIST = ListType.create(STRING);

  /** The type of a list of {@linkplain #INTEGER ints}. */
  @AutoCodec public static final ListType<Integer> INTEGER_LIST = ListType.create(INTEGER);

  /** The type of a dictionary of {@linkplain #STRING strings}. */
  @AutoCodec
  public static final DictType<String, String> STRING_DICT = DictType.create(STRING, STRING);

  /** The type of a dictionary of {@linkplain #STRING_LIST label lists}. */
  @AutoCodec
  public static final DictType<String, List<String>> STRING_LIST_DICT =
      DictType.create(STRING, STRING_LIST);

  /**
   * For ListType objects, returns the type of the elements of the list; for
   * all other types, returns null. (This non-obvious implementation strategy
   * is necessitated by the wildcard capture rules of the Java type system,
   * which disallow conversion from Type{List{ELEM}} to Type{List{?}}.)
   */
  public Type<?> getListElementType() {
    return null;
  }

  /**
   * ConversionException is thrown when a type conversion fails; it contains an explanatory error
   * message.
   */
  public static class ConversionException extends EvalException {
    // Builds the standard "expected value of type 'X' for Y, but got V (starlark-type)" message.
    private static String message(Type<?> type, Object value, @Nullable Object what) {
      Printer.BasePrinter printer = Printer.getPrinter();
      printer.append("expected value of type '").append(type.toString()).append("'");
      if (what != null) {
        printer.append(" for ").append(what.toString());
      }
      printer.append(", but got ");
      printer.repr(value);
      printer.append(" (").append(Starlark.type(value)).append(")");
      return printer.toString();
    }

    public ConversionException(Type<?> type, Object value, @Nullable Object what) {
      super(null, message(type, value, what));
    }

    public ConversionException(String message) {
      super(null, message);
    }
  }

  /********************************************************************
   *                                                                  *
   *                            Subclasses                            *
   *                                                                  *
   ********************************************************************/

  /** Pass-through type that accepts any value unchanged; has no default value. */
  private static class ObjectType extends Type<Object> {
    @Override
    public Object cast(Object value) {
      return value;
    }

    @Override
    public String getDefaultValue() {
      throw new UnsupportedOperationException(
          "ObjectType has no default value");
    }

    @Override
    public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
    }

    @Override
    public String toString() {
      return "object";
    }

    @Override
    public Object convert(Object x, Object what, Object context) {
      return x;
    }
  }

  /** The int type; only accepts {@link Integer} values, defaults to 0, concatenation sums. */
  private static class IntegerType extends Type<Integer> {
    @Override
    public Integer cast(Object value) {
      return (Integer) value;
    }

    @Override
    public Integer getDefaultValue() {
      return 0;
    }

    @Override
    public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
    }

    @Override
    public String toString() {
      return "int";
    }

    @Override
    public Integer convert(Object x, Object what, Object context)
        throws ConversionException {
      if (!(x instanceof Integer)) {
        throw new ConversionException(this, x, what);
      }
      return (Integer) x;
    }

    @Override
    public Integer concat(Iterable<Integer> elements) {
      int ans = 0;
      for (Integer elem : elements) {
        ans += elem;
      }
      return Integer.valueOf(ans);
    }
  }

  /** The boolean type; also tolerates the integers 0 and 1 on conversion. */
  private static class BooleanType extends Type<Boolean> {
    @Override
    public Boolean cast(Object value) {
      return (Boolean) value;
    }

    @Override
    public Boolean getDefaultValue() {
      return false;
    }

    @Override
    public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
    }

    @Override
    public String toString() {
      return "boolean";
    }

    // Conversion to boolean must also tolerate integers of 0 and 1 only.
    @Override
    public Boolean convert(Object x, Object what, Object context)
        throws ConversionException {
      if (x instanceof Boolean) {
        return (Boolean) x;
      }
      Integer xAsInteger = INTEGER.convert(x, what, context);
      if (xAsInteger == 0) {
        return false;
      } else if (xAsInteger == 1) {
        return true;
      }
      throw new ConversionException("boolean is not one of [0, 1]");
    }

    /**
     * Booleans attributes are converted to tags based on their names.
     */
    @Override
    public Set<String> toTagSet(Object value, String name) {
      if (value == null) {
        String msg = "Illegal tag conversion from null on Attribute " + name  + ".";
        throw new IllegalStateException(msg);
      }
      String tag = (Boolean) value ? name : "no" + name;
      return ImmutableSet.of(tag);
    }
  }

  /** The string type; converted values are interned via {@link StringCanonicalizer}. */
  private static class StringType extends Type<String> {
    @Override
    public String cast(Object value) {
      return (String) value;
    }

    @Override
    public String getDefaultValue() {
      return "";
    }

    @Override
    public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
    }

    @Override
    public String toString() {
      return "string";
    }

    @Override
    public String convert(Object x, Object what, Object context)
        throws ConversionException {
      if (!(x instanceof String)) {
        throw new ConversionException(this, x, what);
      }
      return StringCanonicalizer.intern((String) x);
    }

    @Override
    public String concat(Iterable<String> elements) {
      return Joiner.on("").join(elements);
    }

    /**
     * A String is representable as a set containing its value.
     */
    @Override
    public Set<String> toTagSet(Object value, String name) {
      if (value == null) {
        String msg = "Illegal tag conversion from null on Attribute " + name + ".";
        throw new IllegalStateException(msg);
      }
      return ImmutableSet.of((String) value);
    }
  }

  /**
   * A type to support dictionary attributes.
   */
  public static class DictType<KeyT, ValueT> extends Type<Map<KeyT, ValueT>> {

    private final Type<KeyT> keyType;
    private final Type<ValueT> valueType;

    private final Map<KeyT, ValueT> empty = ImmutableMap.of();

    private final LabelClass labelClass;

    @Override
    public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
      // Visits labels reachable through both keys and values.
      for (Map.Entry<KeyT, ValueT> entry : cast(value).entrySet()) {
        keyType.visitLabels(visitor, entry.getKey(), context);
        valueType.visitLabels(visitor, entry.getValue(), context);
      }
    }

    public static <KEY, VALUE> DictType<KEY, VALUE> create(
        Type<KEY> keyType, Type<VALUE> valueType) {
      LabelClass keyLabelClass = keyType.getLabelClass();
      LabelClass valueLabelClass = valueType.getLabelClass();
      // Keys and values may not mix different label classes; the dict inherits whichever
      // side carries labels (keys win when both do, since they must then be equal).
      Preconditions.checkArgument(
          keyLabelClass == LabelClass.NONE
              || valueLabelClass == LabelClass.NONE
              || keyLabelClass == valueLabelClass,
          "A DictType's keys and values must be the same class of label if both contain labels, "
              + "but the key type %s contains %s labels, while "
              + "the value type %s contains %s labels.",
          keyType,
          keyLabelClass,
          valueType,
          valueLabelClass);
      LabelClass labelClass = (keyLabelClass != LabelClass.NONE) ? keyLabelClass : valueLabelClass;

      return new DictType<>(keyType, valueType, labelClass);
    }

    protected DictType(Type<KeyT> keyType, Type<ValueT> valueType, LabelClass labelClass) {
      this.keyType = keyType;
      this.valueType = valueType;
      this.labelClass = labelClass;
    }

    public Type<KeyT> getKeyType() {
      return keyType;
    }

    public Type<ValueT> getValueType() {
      return valueType;
    }

    @Override
    public LabelClass getLabelClass() {
      return labelClass;
    }

    @SuppressWarnings("unchecked")
    @Override
    public Map<KeyT, ValueT> cast(Object value) {
      return (Map<KeyT, ValueT>) value;
    }

    @Override
    public String toString() {
      return "dict(" + keyType + ", " + valueType + ")";
    }

    @Override
    public Map<KeyT, ValueT> convert(Object x, Object what, Object context)
        throws ConversionException {
      if (!(x instanceof Map<?, ?>)) {
        throw new ConversionException(this, x, what);
      }
      Map<?, ?> o = (Map<?, ?>) x;
      // It's possible that #convert() calls transform non-equal keys into equal ones so we can't
      // just use ImmutableMap.Builder() here (that throws on collisions).
      LinkedHashMap<KeyT, ValueT> result = new LinkedHashMap<>();
      for (Map.Entry<?, ?> elem : o.entrySet()) {
        result.put(
            keyType.convert(elem.getKey(), "dict key element", context),
            valueType.convert(elem.getValue(), "dict value element", context));
      }
      return ImmutableMap.copyOf(result);
    }

    @Override
    public Map<KeyT, ValueT> getDefaultValue() {
      return empty;
    }
  }

  /** A type for lists of a given element type */
  public static class ListType<ElemT> extends Type<List<ElemT>> {

    private final Type<ElemT> elemType;

    private final List<ElemT> empty = ImmutableList.of();

    public static <ELEM> ListType<ELEM> create(Type<ELEM> elemType) {
      return new ListType<>(elemType);
    }

    private ListType(Type<ElemT> elemType) {
      this.elemType = elemType;
    }

    @SuppressWarnings("unchecked")
    @Override
    public List<ElemT> cast(Object value) {
      return (List<ElemT>) value;
    }

    @Override
    public Type<ElemT> getListElementType() {
      return elemType;
    }

    @Override
    public LabelClass getLabelClass() {
      return elemType.getLabelClass();
    }

    @Override
    public List<ElemT> getDefaultValue() {
      return empty;
    }

    @Override
    public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
      List<ElemT> elems = cast(value);
      // Hot code path. Optimize for lists with O(1) access to avoid iterator garbage.
      if (elems instanceof ImmutableList || elems instanceof ArrayList) {
        for (int i = 0; i < elems.size(); i++) {
          elemType.visitLabels(visitor, elems.get(i), context);
        }
      } else {
        for (ElemT elem : elems) {
          elemType.visitLabels(visitor, elem, context);
        }
      }
    }

    @Override
    public String toString() {
      return "list(" + elemType + ")";
    }

    @Override
    public List<ElemT> convert(Object x, Object what, Object context)
        throws ConversionException {
      Iterable<?> iterable;
      // Accept any Iterable, plus Depset (which must be flattened first).
      if (x instanceof Iterable) {
        iterable = (Iterable<?>) x;
      } else if (x instanceof Depset) {
        try {
          iterable = ((Depset) x).toCollection();
        } catch (NestedSetDepthException exception) {
          throw new ConversionException(
              "depset exceeded maximum depth "
                  + exception.getDepthLimit()
                  + ". This was only discovered when attempting to flatten the depset for"
                  + " iteration, as the size of depsets is unknown until flattening. See"
                  + " https://github.com/bazelbuild/bazel/issues/9180 for details and possible "
                  + "solutions.");
        }
      } else {
        throw new ConversionException(this, x, what);
      }
      int index = 0;
      List<ElemT> result = new ArrayList<>(Iterables.size(iterable));
      ListConversionContext conversionContext = new ListConversionContext(what);
      for (Object elem : iterable) {
        conversionContext.update(index);
        ElemT converted = elemType.convert(elem, conversionContext, context);
        if (converted != null) {
          result.add(converted);
        } else {
          // shouldn't happen but it does, rarely
          String message =
              "Converting a list with a null element: "
                  + "element "
                  + index
                  + " of "
                  + what
                  + " in "
                  + context;
          LoggingUtil.logToRemote(Level.WARNING, message, new ConversionException(message));
        }
        ++index;
      }
      return result;
    }

    @Override
    public List<ElemT> concat(Iterable<List<ElemT>> elements) {
      ImmutableList.Builder<ElemT> builder = ImmutableList.builder();
      for (List<ElemT> list : elements) {
        builder.addAll(list);
      }
      return builder.build();
    }

    /**
     * A list is representable as a tag set as the contents of itself expressed
     * as Strings. So a {@code List<String>} is effectively converted to a {@code Set<String>}.
     */
    @Override
    public Set<String> toTagSet(Object items, String name) {
      if (items == null) {
        String msg = "Illegal tag conversion from null on Attribute" + name + ".";
        throw new IllegalStateException(msg);
      }
      Set<String> tags = new LinkedHashSet<>();
      @SuppressWarnings("unchecked")
      List<ElemT> itemsAsListofElem = (List<ElemT>) items;
      for (ElemT element : itemsAsListofElem) {
        tags.add(element.toString());
      }
      return tags;
    }

    /**
     * Provides a {@link #toString()} description of the context of the value in a list being
     * converted. This is preferred over a raw string to avoid uselessly constructing strings which
     * are never used. This class is mutable (the index is updated).
     */
    private static class ListConversionContext {
      private final Object what;
      private int index = 0;

      ListConversionContext(Object what) {
        this.what = what;
      }

      // Advances to the given element index; reused across elements to avoid allocation.
      void update(int index) {
        this.index = index;
      }

      @Override
      public String toString() {
        return "element " + index + " of " + what;
      }
    }
  }

  /** Type for lists of arbitrary objects */
  public static class ObjectListType extends ListType<Object> {
    private static final Type<Object> elemType = new ObjectType();

    private ObjectListType() {
      super(elemType);
    }

    @Override
    @SuppressWarnings("unchecked")
    public List<Object> convert(Object x, Object what, Object context)
        throws ConversionException {
      // TODO(adonovan): converge on EvalUtils.toIterable.
      if (x instanceof Sequence) {
        return ((Sequence) x).getImmutableList();
      } else if (x instanceof List) {
        return (List<Object>) x;
      } else if (x instanceof Iterable) {
        return ImmutableList.copyOf((Iterable<?>) x);
      } else {
        throw new ConversionException(this, x, what);
      }
    }
  }
}
/** * Copyright (c) 2004-2005, Regents of the University of California * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of California, Los Angeles nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package avrora.sim.mcu;

import avrora.Avrora;
import avrora.sim.ActiveRegister;
import avrora.sim.RWRegister;
import avrora.util.Arithmetic;
import avrora.util.StringUtil;

import java.util.HashMap;
import java.util.Iterator;

/**
 * The <code>RegisterSet</code> class is a utility that simplifies the implementation
 * of certain IO registers that contain many types of fields whose bits may be spread
 * out and mixed up over multiple IO registers. For example, a 5 bit field used to
 * configure a device might be spread among multiple 1, 2, 3, or 4 bit fields across
 * multiple registers. This class allows a set of those registers to be created,
 * and collects together writes and reads easily.
 *
 * @author Ben L. Titzer
 */
public class RegisterSet {

    /**
     * The <code>Field</code> class represents a collection of bits that represent
     * a quantity used by a device. The bits that make up this quantity might be spread
     * over multiple IO registers or mixed up among one IO register. Also, a field might
     * be in different IO registers depending on the microcontroller model. For this reason,
     * the <code>Field</code> class offers convenience to the device implementer by collecting
     * all of the individual bit updates in different registers into one coherent, contiguous
     * value.
     *
     * <p>
     * Device implementations can simply get a reference to the <code>Field</code> object
     * (such as a timer mode, prescaler value, etc) by calling <code>getField()</code>
     * in <code>RegisterSet</code>.
     */
    public static class Field {
        // NOTE(review): set to true on every write but never cleared or read in this
        // class — presumably consulted by device implementations; confirm before removing.
        boolean consistent;
        public int value;

        /**
         * Merges the bits of {@code nval} selected by {@code wmask} into the current
         * value, marks the field consistent, and fires {@link #update()}.
         *
         * @param nval  the new bits, already shifted into field position
         * @param wmask mask of the bit positions being written
         */
        public void write(int nval, int wmask) {
            // Java precedence: & binds tighter than |, so this is (value & ~wmask) | nval.
            value = value & ~wmask | nval;
            consistent = true;
            update();
        }

        /**
         * Overwrites the entire field value, marks it consistent, and fires {@link #update()}.
         *
         * @param nval the complete new field value
         */
        public void write(int nval) {
            value = nval;
            consistent = true;
            update();
        }

        /**
         * Programmatic (non-register) field update; not implemented.
         *
         * @param nval unused
         */
        public void set(int nval) {
            throw Avrora.unimplemented();
        }

        /**
         * Hook invoked after every write; subclasses override to react to field changes.
         */
        public void update() {
            // do nothing.
        }
    }

    /**
     * Accumulates partial writes to a fragmented field until {@link #commit()} pushes
     * the collected bits into the user-visible {@link Field} object.
     */
    static class FieldWriter {
        int value;        // bits accumulated so far, in field position
        int writtenMask;  // which field bits have been written since the last commit
        Field fobject;    // the user-visible field to deliver the bits to

        // Delivers the accumulated bits to the Field and resets the accumulator.
        void commit() {
            fobject.write(value, writtenMask);
            value = 0;
            writtenMask = 0;
        }
    }

    /**
     * Base class for the per-subfield handlers invoked when a program writes an IO
     * register: extracts the relevant bits of the written byte and routes them.
     */
    static abstract class SubRegWriter {
        final int ior_low_bit; // lowest bit of this subfield within the IO register
        final int mask;        // mask (already right-aligned) selecting the subfield's bits
        final int length;      // number of bits in this subfield

        SubRegWriter(RegisterLayout.SubField sf) {
            ior_low_bit = sf.ior_low_bit;
            mask = sf.mask;
            length = sf.length;
        }

        /** Processes the bits of {@code val} that belong to this subfield. */
        abstract void write(byte val);
    }

    /**
     * Handler for one fragment of a field that is split across several subfields;
     * accumulates into a shared {@link FieldWriter} and commits only when this
     * fragment is flagged as the committing one.
     */
    static class SubFieldWriter extends SubRegWriter {
        final FieldWriter fieldWriter;
        final RegisterLayout.SubField subField;

        SubFieldWriter(RegisterLayout.SubField sf, FieldWriter fw) {
            super(sf);
            subField = sf;
            fieldWriter = fw;
        }

        void write(byte val) {
            // Extract this subfield's bits from the register byte...
            int wval = (val >> ior_low_bit) & mask;
            // ...and place them at their position within the whole field.
            fieldWriter.value |= wval << subField.field_low_bit;
            fieldWriter.writtenMask |= mask << subField.field_low_bit;
            // Only the subfield marked 'commit' flushes the accumulated value.
            if ( subField.commit ) fieldWriter.commit();
        }
    }

    /**
     * Handler for a field that lives entirely in one subfield: writes the whole
     * field value directly, bypassing the accumulator.
     */
    static class TotalFieldWriter extends SubRegWriter {
        final FieldWriter fieldWriter;

        TotalFieldWriter(RegisterLayout.SubField sf, FieldWriter fw) {
            super(sf);
            fieldWriter = fw;
        }

        void write(byte val) {
            int value = (val >> ior_low_bit) & mask;
            fieldWriter.fobject.write(value);
        }
    }

    /** Handler for reserved bits; currently ignores all writes. */
    static class ReservedWriter extends SubRegWriter {
        ReservedWriter(RegisterLayout.SubField sf) {
            super(sf);
        }

        void write(byte val) {
            // TODO: check that all writes are zeroes
        }
    }

    /** Handler for unused bits; writes are silently discarded. */
    static class UnusedWriter extends SubRegWriter {
        UnusedWriter(RegisterLayout.SubField sf) {
            super(sf);
        }

        void write(byte val) {
            // do nothing.
        }
    }

    /**
     * Routes a single-bit write into the owning field's accumulator and commits
     * immediately.
     */
    class BitWriter {
        final int fval;            // value OR'd into the accumulator when the bit is set
        final FieldWriter fwriter;

        BitWriter(int fval, FieldWriter fw) {
            this.fval = fval;
            fwriter = fw;
        }

        void write(boolean val) {
            if ( val ) fwriter.value |= fval;
            fwriter.writtenMask |= fval;
            fwriter.commit();
        }
    }

    /**
     * The <code>MultiFieldRegister</code> class implements an IO register that is
     * directly read and written by the program. This IO register implements writes
     * that alter multiple fields and subfields in the register set.
     */
    public class MultiFieldRegister implements ActiveRegister {
        byte value;
        final SubRegWriter[] subFields;
        final BitWriter[] bits;

        MultiFieldRegister(SubRegWriter[] srw, BitWriter[] b) {
            subFields = srw;
            bits = b;
        }

        public byte read() {
            return value;
        }

        public boolean readBit(int bit) {
            return Arithmetic.getBit(value, bit);
        }

        // A full-byte write fans out to every subfield handler in layout order.
        public void write(byte nval) {
            this.value = nval;
            for ( int cntr = 0; cntr < subFields.length; cntr++ ) {
                SubRegWriter sf = subFields[cntr];
                sf.write(nval);
            }
        }

        public void writeBit(int bit, boolean val) {
            bits[bit].write(val);
        }
    }

    // Maps field name (String) -> FieldWriter. Raw HashMap: pre-generics codebase.
    protected final HashMap fields;
    protected final ActiveRegister[] registers;
    protected final RegisterLayout layout;

    /**
     * The constructor for the <code>RegisterSet</code> class creates a new register set
     * with the specified register layout and size.
     *
     * @param rl the layout of all the registers in the set
     */
    public RegisterSet(RegisterLayout rl) {
        fields = new HashMap();
        registers = new ActiveRegister[rl.ioreg_size];
        layout = rl;
        // create the field representations
        Iterator i = rl.fields.values().iterator();
        while ( i.hasNext() ) {
            RegisterLayout.Field f = (RegisterLayout.Field)i.next();
            FieldWriter fw = new FieldWriter();
            fw.fobject = new Field();
            fields.put(f.name, fw);
        }
        // create the active registers
        for ( int ior = 0; ior < rl.ioreg_size; ior++ ) {
            RegisterLayout.RegisterInfo ri = rl.info[ior];
            if ( ri == null || ri.subfields == null ) {
                // no subfields; no special register is necessary
                registers[ior] = new RWRegister();
            } else {
                registers[ior] = createMultiFieldRegister(ri);
            }
        }
    }

    // Builds the special register for an IO address whose byte spans multiple fields.
    private MultiFieldRegister createMultiFieldRegister(RegisterLayout.RegisterInfo ri) {
        // there are subfields in this register; create a special ActiveRegister
        SubRegWriter[] srw = new SubRegWriter[ri.subfields.length];
        for ( int cntr = 0; cntr < srw.length; cntr++ ) {
            createSubRegWriter(ri, cntr, srw);
        }
        BitWriter[] bw = createBitWriters(ri.subfields);
        return new MultiFieldRegister(srw, bw);
    }

    // Creates one BitWriter per register bit, in subfield order; the layout must
    // account for all 8 bits of the register byte.
    private BitWriter[] createBitWriters(RegisterLayout.SubField[] sfs) {
        BitWriter[] bw = new BitWriter[8];
        int bwcount = 0;
        for ( int cntr = 0; cntr < sfs.length; cntr++ ) {
            RegisterLayout.SubField sf = sfs[cntr];
            for ( int bit = 0; bit < sf.length; bit++ ) {
                // NOTE(review): BitWriter treats its first argument as a bit MASK
                // (it is OR'd into the accumulator), but field_low_bit+bit is a bit
                // INDEX — looks like it may need to be 1 << (field_low_bit+bit).
                // Verify against RegisterLayout/BitWriter usage before changing.
                bw[bwcount++] = new BitWriter(sf.field_low_bit+bit, getFieldWriter(sf));
            }
        }
        // check that there are exactly 8 bits
        if ( bwcount != 8 ) {
            throw new Avrora.Error("RegisterSet Error", "expected 8 bits, found: "+bwcount);
        }
        return bw;
    }

    // Resolves the FieldWriter for a subfield; RESERVED/UNUSED bits get a throwaway
    // writer so BitWriter always has a sink.
    private FieldWriter getFieldWriter(RegisterLayout.SubField sf) {
        if ( sf.field == RegisterLayout.RESERVED || sf.field == RegisterLayout.UNUSED ) {
            FieldWriter fw = new FieldWriter();
            fw.fobject = new Field();
            return fw;
        }
        return (FieldWriter)fields.get(sf.field.name);
    }

    // Picks the right SubRegWriter implementation for subfield cntr of register ri.
    private void createSubRegWriter(RegisterLayout.RegisterInfo ri, int cntr, SubRegWriter[] srw) {
        RegisterLayout.SubField sf = ri.subfields[cntr];
        RegisterLayout.Field field = sf.field;
        if ( sf.field == RegisterLayout.RESERVED ) {
            ReservedWriter rw = new ReservedWriter(sf);
            srw[cntr] = rw;
        } else if ( sf.field == RegisterLayout.UNUSED ) {
            UnusedWriter uw = new UnusedWriter(sf);
            srw[cntr] = uw;
        } else if ( sf.field.subfields.length == 1) {
            // if the field has only one subfield, write the whole field value at once
            TotalFieldWriter tfw = new TotalFieldWriter(sf, (FieldWriter)fields.get(field.name));
            srw[cntr] = tfw;
        } else {
            // otherwise, this subfield is for a field that is fragmented
            SubFieldWriter sfw = new SubFieldWriter(sf, (FieldWriter)fields.get(field.name));
            srw[cntr] = sfw;
        }
    }

    /**
     * The <code>getSize()</code> method returns the total number of registers in this register set.
     *
     * @return the number of IO registers in this set
     */
    public int getSize() {
        return registers.length;
    }

    /**
     * The <code>installIOReg()</code> method installs a new register at the specified address.
     * This is intended to be used only in the device implementations.
     *
     * @param ar  the active register to install
     * @param ior the address to install the active register to
     */
    public void installIOReg(ActiveRegister ar, int ior) {
        registers[ior] = ar;
    }

    /**
     * The <code>getRegisterLayout()</code> method gets a reference to the register layout object
     * for this register set. The register layout describes where each IO register is and what
     * fields it contains.
     *
     * @return a reference to the register layout for this register set
     */
    public RegisterLayout getRegisterLayout() {
        return layout;
    }

    /**
     * The <code>share()</code> method is NOT meant for general use. It is used ONLY by the
     * interpreter to share the underlying array representation that maps from register address
     * to an actual register object.
     *
     * @return a reference to the array containing references to the actual register objects
     */
    public ActiveRegister[] share() {
        return registers;
    }

    /**
     * The <code>getField()</code> gets an object that represents an entire field which
     * may be stored across multiple registers in multiple bit fields. This object allows
     * access to the field's value without consideration for its underlying representation
     * in the IO register(s).
     *
     * @param fname the name of the field to get the field representation for
     * @return a reference to the <code>Field</code> object that represents the field
     */
    public Field getField(String fname) {
        FieldWriter fwriter = getFieldWriter(fname);
        return fwriter.fobject;
    }

    // Looks up a field's writer by name, failing loudly on an unknown field name.
    private FieldWriter getFieldWriter(String fname) {
        FieldWriter fwriter = ((FieldWriter)fields.get(fname));
        if ( fwriter == null ) {
            throw Avrora.failure("Field not found in RegisterSet: "+StringUtil.quote(fname));
        }
        return fwriter;
    }

    /**
     * The <code>installField()</code> method allows device implementations to substitute a new
     * field implementation for the named field. The field implementation can then override the
     * appropriate methods of the <code>RegisterSet.Field</code> class to be notified upon writes.
     *
     * @param fname the name of the field
     * @param fo    the field object to install for this field
     * @return the new field installed
     */
    public Field installField(String fname, Field fo) {
        FieldWriter fwriter = getFieldWriter(fname);
        fwriter.fobject = fo;
        return fo;
    }
}
/**********************************************************************************
 * $URL$
 * $Id$
 ***********************************************************************************
 *
 * Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.opensource.org/licenses/ECL-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **********************************************************************************/

package org.sakaiproject.component.app.postem.data;

import java.io.Serializable;
import java.sql.Timestamp;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

import org.apache.commons.lang.StringEscapeUtils;

import org.sakaiproject.api.app.postem.data.Gradebook;
import org.sakaiproject.api.app.postem.data.StudentGrades;
import org.sakaiproject.api.app.postem.data.Template;

/**
 * One student's row of grades within a POST'EM {@link Gradebook}: the student's
 * username, an ordered list of grade strings (aligned with the gradebook's
 * headings), and the time the student last viewed the grades.
 *
 * <p>Identity (equals/hashCode/compareTo) is based solely on the username.
 */
public class StudentGradesImpl implements StudentGrades, Comparable, Serializable {

	protected Gradebook gradebook;

	protected String username;

	// Ordered grade values; index i corresponds to heading i+1 (heading 0 is the username column).
	protected List grades = new ArrayList();

	// Per-instance format (SimpleDateFormat is not thread-safe, so not static).
	protected DateFormat dateFormat = new SimpleDateFormat("d MMM yyyy HH:mm");

	protected Timestamp lastChecked;

	protected Long id;

	protected Integer lockId;

	public StudentGradesImpl() {
	}

	/**
	 * @param username the student's username; stored trimmed and lower-cased
	 * @param grades   the ordered grade values for this student
	 */
	public StudentGradesImpl(String username, List grades) {
		// ensure the usernames are trimmed and lowercase
		this.username = username.trim().toLowerCase();
		this.grades = grades;
	}

	public Integer getLockId() {
		return lockId;
	}

	public void setLockId(Integer lockId) {
		this.lockId = lockId;
	}

	public Gradebook getGradebook() {
		return gradebook;
	}

	public void setGradebook(Gradebook gradebook) {
		this.gradebook = gradebook;
	}

	public String getUsername() {
		return username;
	}

	// NOTE(review): unlike the constructor this only trims, it does not lower-case;
	// kept as-is because persisted data/callers may rely on it — confirm before unifying.
	public void setUsername(String username) {
		this.username = username.trim();
	}

	public List getGrades() {
		return grades;
	}

	public void setGrades(List grades) {
		this.grades = grades;
	}

	/** @return the last-checked time formatted for display, or "never" if unset */
	public String getCheckDateTime() {
		if (lastChecked == null) {
			return "never";
		}
		return dateFormat.format((Date) lastChecked);
	}

	public Timestamp getLastChecked() {
		return lastChecked;
	}

	public void setLastChecked(Timestamp lastChecked) {
		this.lastChecked = lastChecked;
	}

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	/** Orders students by username; consistent with {@link #equals(Object)}. */
	public int compareTo(Object other) {
		if (this == other)
			return 0;
		final StudentGrades that = (StudentGrades) other;
		return this.getUsername().compareTo(that.getUsername());
	}

	public boolean equals(Object other) {
		if (this == other)
			return true;
		if (!(other instanceof StudentGrades))
			return false;
		final StudentGrades that = (StudentGrades) other;
		return this.getUsername().equals(that.getUsername());
	}

	public int hashCode() {
		return getUsername().hashCode();
	}

	/** @return true if the student has checked grades since the gradebook was last updated */
	public boolean getReadAfterUpdate() {
		if (lastChecked == null) {
			return false;
		}
		return getLastChecked().after(gradebook.getLastUpdated());
	}

	/**
	 * Formats the grades for display, independently of the JSF display. If a
	 * {@link Template} exists for the parent gradebook, that template's
	 * fillGrades method is used. Otherwise, the grades are formatted into a plain
	 * old table.
	 * <p>
	 * This is a bad method for including display code within it; however, I do
	 * this for a simple reason: we're already including display code at this
	 * level via the template.
	 * <p>
	 * The prettier eventual solution will be to inject a default template via the
	 * controller, or possibly in the manager class (using a defaultTemplate
	 * property). This works for the quick and dirty now.
	 */
	public String formatGrades() {
		if (gradebook.getTemplate() == null) {
			List h2 = new ArrayList(gradebook.getHeadings());

			StringBuilder gradeBuffer = new StringBuilder();
			gradeBuffer.append("<table class=\"itemSummary\">");
			if (h2.size() != 0) {
				// Heading 0 labels the username row.
				gradeBuffer.append("<tr><th scope=\"row\">"
						+ StringEscapeUtils.escapeHtml(h2.get(0).toString())
						+ "</th><td>");
				h2.remove(0);
				gradeBuffer.append(StringEscapeUtils.escapeHtml(getUsername()));
				gradeBuffer.append("</td></tr>");
				Iterator ii = h2.iterator();
				Iterator jj = grades.iterator();
				// FIX: also guard on jj.hasNext() — previously a gradebook with more
				// headings than grades threw NoSuchElementException from jj.next().
				while (ii.hasNext() && jj.hasNext()) {
					gradeBuffer.append("<tr><th scope=\"row\">");
					gradeBuffer.append(StringEscapeUtils.escapeHtml((String) ii.next()));
					gradeBuffer.append("</th><td>");
					gradeBuffer.append(StringEscapeUtils.escapeHtml((String) jj.next()));
					gradeBuffer.append("</td></tr>");
				}
			} else {
				// No headings: username row followed by one bare row per grade.
				gradeBuffer.append("<tr><td>");
				gradeBuffer.append(StringEscapeUtils.escapeHtml(getUsername()));
				gradeBuffer.append("</td></tr>");
				Iterator jj = grades.iterator();
				while (jj.hasNext()) {
					gradeBuffer.append("<tr><td>");
					gradeBuffer.append(StringEscapeUtils.escapeHtml((String) jj.next()));
					gradeBuffer.append("</td></tr>");
				}
			}
			gradeBuffer.append("</table>");
			return gradeBuffer.toString();
		} else {
			return gradebook.getTemplate().fillGrades(this);
		}
	}

	/**
	 * Renders this student's grades as a sequence of HTML {@code <td>} cells
	 * (no surrounding row/table markup); values are HTML-escaped.
	 */
	public String getGradesRow() {
		StringBuilder gradeBuffer = new StringBuilder();
		int totalWidth = 0;
		Iterator jj = grades.iterator();
		int ii = 0;
		while (jj.hasNext()) {
			String current = (String) jj.next();
			// Legacy width bookkeeping kept for behavioral parity: getProperWidth()
			// is still consulted and a malformed width (not "<n>px") still throws
			// NumberFormatException, exactly as before. The width itself is unused
			// since the fixed-width table markup was retired.
			String width = gradebook.getProperWidth(ii);
			int iwidth = Integer.parseInt(width.substring(0, width.length() - 2));
			totalWidth += iwidth;
			gradeBuffer.append("<td style=\"padding:0.6em;\">");
			gradeBuffer.append(StringEscapeUtils.escapeHtml(current));
			gradeBuffer.append("</td>");
			ii++;
		}
		return gradeBuffer.toString();
	}
}
/*
       Licensed to the Apache Software Foundation (ASF) under one
       or more contributor license agreements.  See the NOTICE file
       distributed with this work for additional information
       regarding copyright ownership.  The ASF licenses this file
       to you under the Apache License, Version 2.0 (the
       "License"); you may not use this file except in compliance
       with the License.  You may obtain a copy of the License at

         http://www.apache.org/licenses/LICENSE-2.0

       Unless required by applicable law or agreed to in writing,
       software distributed under the License is distributed on an
       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
       KIND, either express or implied.  See the License for the
       specific language governing permissions and limitations
       under the License.
*/
package org.apache.cordova.mediaac;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaResourceApi;

import android.content.Context;
import android.media.AudioManager;
import android.media.AudioManager.OnAudioFocusChangeListener;
import android.net.Uri;

import java.lang.String;
import java.util.ArrayList;

import org.apache.cordova.LOG;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.HashMap;

/**
 * This class called by CordovaActivity to play and record audio.
 * The file can be local or over a network using http.
 *
 * Audio formats supported (tested):
 * 	.mp3, .wav
 *
 * Local audio files must reside in one of two places:
 * 		android_asset: 		file name must start with /android_asset/sound.mp3
 * 		sdcard:				file name is just sound.mp3
 */
public class AudioHandler extends CordovaPlugin {

    public static String TAG = "AudioHandler";
    HashMap<String, AudioPlayer> players;	// Audio player object
    ArrayList<AudioPlayer> pausedForPhone;     // Audio players that were paused when phone call came in
    ArrayList<AudioPlayer> pausedForFocus;     // Audio players that were paused when focus was lost
    // Volume-control stream to restore when the last player is released (-1 = not overridden).
    private int origVolumeStream = -1;
    // Callback channel used to push asynchronous events back to JavaScript.
    private CallbackContext messageChannel;

    /**
     * Constructor. Initializes the player collections and registers a stream
     * handler for the "icy" (SHOUTcast) URL protocol used by the AAC decoder.
     */
    public AudioHandler() {
        this.players = new HashMap<String, AudioPlayer>();
        this.pausedForPhone = new ArrayList<AudioPlayer>();
        this.pausedForFocus = new ArrayList<AudioPlayer>();

        //Register icy protocol
        try {
            // setURLStreamHandlerFactory may only be called once per JVM; a second
            // call throws, which is why the whole attempt is wrapped in a broad catch.
            java.net.URL.setURLStreamHandlerFactory( new java.net.URLStreamHandlerFactory(){
                public java.net.URLStreamHandler createURLStreamHandler( String protocol ) {
                    LOG.d( "Registrando icy", "Asking for stream handler for protocol: '" + protocol + "'" );
                    if ("icy".equals( protocol )) return new com.spoledge.aacdecoder.IcyURLStreamHandler();
                    return null;
                }
            });
        }
        catch (Throwable t) {
            LOG.w( "Registrando icy", "Cannot set the ICY URLStreamHandler - maybe already set ? - " + t );
        }
    }

    /**
     * Executes the request and returns PluginResult.
     * @param action 		The action to execute.
     * @param args 			JSONArray of arguments for the plugin.
     * @param callbackContext		The callback context used when calling back into JavaScript.
     * @return 			A PluginResult object with a status and message.
     */
    public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException {
        CordovaResourceApi resourceApi = webView.getResourceApi();
        PluginResult.Status status = PluginResult.Status.OK;
        String result = "";

        if (action.equals("startPlayingAudio")) {
            String target = args.getString(1);
            String fileUriStr;
            try {
                // Let Cordova remap the URI (e.g. cdvfile://); fall back to the raw string.
                Uri targetUri = resourceApi.remapUri(Uri.parse(target));
                fileUriStr = targetUri.toString();
            } catch (IllegalArgumentException e) {
                fileUriStr = target;
            }
            this.startPlayingAudio(args.getString(0), FileHelper.stripFileProtocol(fileUriStr));
        }
        else if (action.equals("pausePlayingAudio")) {
            this.pausePlayingAudio(args.getString(0));
        }
        else if (action.equals("stopPlayingAudio")) {
            this.stopPlayingAudio(args.getString(0));
        }
        else if (action.equals("create")) {
            String id = args.getString(0);
            String src = FileHelper.stripFileProtocol(args.getString(1));
            getOrCreatePlayer(id, src);
        }
        else if (action.equals("release")) {
            boolean b = this.release(args.getString(0));
            callbackContext.sendPluginResult(new PluginResult(status, b));
            return true;
        }
        else if (action.equals("messageChannel")) {
            messageChannel = callbackContext;
            return true;
        }
        else { // Unrecognized action.
            return false;
        }

        callbackContext.sendPluginResult(new PluginResult(status, result));
        return true;
    }

    /**
     * Stop all audio players and recorders.
     */
    public void onDestroy() {
        // Restore the volume stream before destroying the players.
        if (!players.isEmpty()) {
            onLastPlayerReleased();
        }
        for (AudioPlayer audio : this.players.values()) {
            audio.destroy();
        }
        this.players.clear();
    }

    /**
     * Stop all audio players and recorders on navigate.
     */
    @Override
    public void onReset() {
        onDestroy();
    }

    /**
     * Called when a message is sent to plugin.
     *
     * @param id            The message id
     * @param data          The message data
     * @return              Object to stop propagation or null
     */
    public Object onMessage(String id, Object data) {

        // If phone message
        if (id.equals("telephone")) {

            // If phone ringing, then pause playing
            if ("ringing".equals(data) || "offhook".equals(data)) {

                // Get all audio players and pause them
                for (AudioPlayer audio : this.players.values()) {
                    if (audio.getState() == AudioPlayer.STATE.MEDIA_RUNNING.ordinal()) {
                        this.pausedForPhone.add(audio);
                        audio.stopPlaying();
                    }
                }
            }

            // If phone idle, then resume playing those players we paused
            else if ("idle".equals(data)) {
                for (AudioPlayer audio : this.pausedForPhone) {
                    audio.startPlaying(null);
                }
                this.pausedForPhone.clear();
            }
        }
        return null;
    }

    //--------------------------------------------------------------------------
    // LOCAL METHODS
    //--------------------------------------------------------------------------

    // Looks up the player for id, creating (and registering) one on first use.
    private AudioPlayer getOrCreatePlayer(String id, String file) {
        AudioPlayer ret = players.get(id);
        if (ret == null) {
            if (players.isEmpty()) {
                onFirstPlayerCreated();
            }
            ret = new AudioPlayer(this, id, file);
            players.put(id, ret);
        }
        return ret;
    }

    /**
     * Release the audio player instance to save memory.
     * @param id				The id of the audio player
     */
    private boolean release(String id) {
        AudioPlayer audio = players.remove(id);
        if (audio == null) {
            return false;
        }
        if (players.isEmpty()) {
            onLastPlayerReleased();
        }
        audio.destroy();
        return true;
    }

    /**
     * Start or resume playing audio file.
     * @param id				The id of the audio player
     * @param file				The name of the audio file.
     */
    public void startPlayingAudio(String id, String file) {
        AudioPlayer audio = getOrCreatePlayer(id, file);
        audio.startPlaying(file);
        getAudioFocus();
    }

    /**
     * Pause playing.
     * @param id				The id of the audio player
     */
    public void pausePlayingAudio(String id) {
        // NOTE(review): delegates to stopPlaying(), same as stopPlayingAudio() —
        // pause/stop distinction (if any) lives inside AudioPlayer; confirm there.
        AudioPlayer audio = this.players.get(id);
        if (audio != null) {
            audio.stopPlaying();
        }
    }

    /**
     * Stop playing the audio file.
     * @param id				The id of the audio player
     */
    public void stopPlayingAudio(String id) {
        AudioPlayer audio = this.players.get(id);
        if (audio != null) {
            audio.stopPlaying();
        }
    }

    /**
     * Set the audio device to be used for playback.
     *
     * @param output			1=earpiece, 2=speaker
     */
    @SuppressWarnings("deprecation")
    public void setAudioOutputDevice(int output) {
        // Uses the long-deprecated setRouting() API; kept for legacy devices.
        String TAG1 = "AudioHandler.setAudioOutputDevice(): Error : ";
        AudioManager audiMgr = (AudioManager) this.cordova.getActivity().getSystemService(Context.AUDIO_SERVICE);
        if (output == 2) {
            audiMgr.setRouting(AudioManager.MODE_NORMAL,
                               AudioManager.ROUTE_SPEAKER, AudioManager.ROUTE_ALL);
        }
        else if (output == 1) {
            audiMgr.setRouting(AudioManager.MODE_NORMAL,
                               AudioManager.ROUTE_EARPIECE, AudioManager.ROUTE_ALL);
        }
        else {
            LOG.e(TAG1," Unknown output device");
        }
    }

    // Pauses every running player and remembers it so focus regain can resume it.
    public void pauseAllLostFocus() {
        for (AudioPlayer audio : this.players.values()) {
            if (audio.getState() == AudioPlayer.STATE.MEDIA_RUNNING.ordinal()) {
                this.pausedForFocus.add(audio);
                audio.stopPlaying();
            }
        }
    }

    // Resumes the players that were paused by pauseAllLostFocus().
    public void resumeAllGainedFocus() {
        for (AudioPlayer audio : this.pausedForFocus) {
            audio.startPlaying(null);
        }
        this.pausedForFocus.clear();
    }

    /**
     * Get the the audio focus
     */
    private OnAudioFocusChangeListener focusChangeListener = new OnAudioFocusChangeListener() {
        public void onAudioFocusChange(int focusChange) {
            switch (focusChange) {
                // All three loss variants are treated the same: pause everything.
                case (AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK) :
                case (AudioManager.AUDIOFOCUS_LOSS_TRANSIENT) :
                case (AudioManager.AUDIOFOCUS_LOSS) :
                    pauseAllLostFocus();
                    break;
                case (AudioManager.AUDIOFOCUS_GAIN):
                    resumeAllGainedFocus();
                    break;
                default:
                    break;
            }
        }
    };

    // Requests audio focus for music playback; logs (but does not fail) on denial.
    public void getAudioFocus() {
        String TAG2 = "AudioHandler.getAudioFocus(): Error : ";
        AudioManager am = (AudioManager) this.cordova.getActivity().getSystemService(Context.AUDIO_SERVICE);
        int result = am.requestAudioFocus(focusChangeListener,
                                          AudioManager.STREAM_MUSIC,
                                          AudioManager.AUDIOFOCUS_GAIN);
        if (result != AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
            LOG.e(TAG2,result + " instead of " + AudioManager.AUDIOFOCUS_REQUEST_GRANTED);
        }
    }

    /**
     * Get the audio device to be used for playback.
     *
     * @return					1=earpiece, 2=speaker
     */
    @SuppressWarnings("deprecation")
    public int getAudioOutputDevice() {
        AudioManager audiMgr = (AudioManager) this.cordova.getActivity().getSystemService(Context.AUDIO_SERVICE);
        if (audiMgr.getRouting(AudioManager.MODE_NORMAL) == AudioManager.ROUTE_EARPIECE) {
            return 1;
        }
        else if (audiMgr.getRouting(AudioManager.MODE_NORMAL) == AudioManager.ROUTE_SPEAKER) {
            return 2;
        }
        else {
            return -1;
        }
    }

    // First player created: route hardware volume keys to the music stream.
    private void onFirstPlayerCreated() {
        origVolumeStream = cordova.getActivity().getVolumeControlStream();
        cordova.getActivity().setVolumeControlStream(AudioManager.STREAM_MUSIC);
    }

    // Last player released: restore the activity's original volume stream.
    private void onLastPlayerReleased() {
        if (origVolumeStream != -1) {
            cordova.getActivity().setVolumeControlStream(origVolumeStream);
            origVolumeStream = -1;
        }
    }

    // Pushes an event to JavaScript via the persistent message channel, if open.
    void sendEventMessage(String action, JSONObject actionData) {
        JSONObject message = new JSONObject();
        try {
            message.put("action", action);
            if (actionData != null) {
                message.put(action, actionData);
            }
        } catch (JSONException e) {
            LOG.e(TAG, "Failed to create event message", e);
        }

        PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, message);
        // keepCallback: the channel stays open so multiple events can be sent.
        pluginResult.setKeepCallback(true);
        if (messageChannel != null) {
            messageChannel.sendPluginResult(pluginResult);
        }
    }
}
/**
 * Copyright 2015 Netflix, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
 * the License for the specific language governing permissions and limitations under the License.
 */
package io.reactivex.internal.subscriptions;

import static org.mockito.Mockito.*;
import static org.junit.Assert.*;

import org.junit.Test;
import org.reactivestreams.Subscription;

import io.reactivex.disposables.Disposable;

/**
 * JUnit/Mockito interaction tests for {@code AsyncSubscription}, covering the
 * ordering contract between {@code request}/{@code cancel} and the deferred
 * {@code setSubscription}/{@code setResource}/{@code replaceResource} calls.
 */
public class AsyncSubscriptionTest {

    /** Subscription set before use: request and cancel are forwarded directly. */
    @Test
    public void testNoResource() {
        AsyncSubscription as = new AsyncSubscription();
        Subscription s = mock(Subscription.class);
        assertTrue(as.setSubscription(s));

        as.request(1);
        as.cancel();

        verify(s).request(1);
        verify(s).cancel();
    }

    /** A request made before the subscription arrives is replayed once it is set. */
    @Test
    public void testRequestBeforeSet() {
        AsyncSubscription as = new AsyncSubscription();
        Subscription s = mock(Subscription.class);

        as.request(1);
        assertTrue(as.setSubscription(s));
        as.cancel();

        verify(s).request(1);
        verify(s).cancel();
    }

    /** Cancelling first: a later subscription is rejected and cancelled, never requested. */
    @Test
    public void testCancelBeforeSet() {
        AsyncSubscription as = new AsyncSubscription();
        Subscription s = mock(Subscription.class);

        as.request(1);
        as.cancel();
        assertFalse(as.setSubscription(s));

        verify(s, never()).request(1);
        verify(s).cancel();
    }

    /**
     * Setting a second subscription reports success but cancels the newcomer and
     * keeps the first one as {@code actual}.
     */
    @Test
    public void testSingleSet() {
        AsyncSubscription as = new AsyncSubscription();
        Subscription s = mock(Subscription.class);
        assertTrue(as.setSubscription(s));

        Subscription s1 = mock(Subscription.class);
        assertTrue(as.setSubscription(s1));

        assertSame(as.actual, s);

        verify(s1).cancel();
    }

    /** A resource supplied via the constructor is disposed on cancel. */
    @Test
    public void testInitialResource() {
        Disposable r = mock(Disposable.class);
        AsyncSubscription as = new AsyncSubscription(r);
        as.cancel();

        verify(r).dispose();
    }

    /** A resource attached with setResource is disposed on cancel. */
    @Test
    public void testSetResource() {
        AsyncSubscription as = new AsyncSubscription();
        Disposable r = mock(Disposable.class);
        assertTrue(as.setResource(r));

        as.cancel();

        verify(r).dispose();
    }

    /** A resource attached with replaceResource is disposed on cancel. */
    @Test
    public void testReplaceResource() {
        AsyncSubscription as = new AsyncSubscription();
        Disposable r = mock(Disposable.class);
        assertTrue(as.replaceResource(r));

        as.cancel();

        verify(r).dispose();
    }

    /** setResource disposes the resource it replaces; both end up disposed. */
    @Test
    public void testSetResource2() {
        AsyncSubscription as = new AsyncSubscription();
        Disposable r = mock(Disposable.class);
        assertTrue(as.setResource(r));

        Disposable r2 = mock(Disposable.class);
        assertTrue(as.setResource(r2));

        as.cancel();

        verify(r).dispose();
        verify(r2).dispose();
    }

    /** replaceResource does NOT dispose the replaced resource; only the live one is disposed. */
    @Test
    public void testReplaceResource2() {
        AsyncSubscription as = new AsyncSubscription();
        Disposable r = mock(Disposable.class);
        assertTrue(as.replaceResource(r));

        Disposable r2 = mock(Disposable.class);
        assertTrue(as.replaceResource(r2));

        as.cancel();

        verify(r, never()).dispose();
        verify(r2).dispose();
    }

    /** After cancel, setResource rejects and immediately disposes the new resource. */
    @Test
    public void testSetResourceAfterCancel() {
        AsyncSubscription as = new AsyncSubscription();
        as.cancel();

        Disposable r = mock(Disposable.class);

        assertFalse(as.setResource(r));

        verify(r).dispose();
    }

    /** After cancel, replaceResource rejects and immediately disposes the new resource. */
    @Test
    public void testReplaceResourceAfterCancel() {
        AsyncSubscription as = new AsyncSubscription();
        as.cancel();

        Disposable r = mock(Disposable.class);

        assertFalse(as.replaceResource(r));

        verify(r).dispose();
    }

    /** cancel is idempotent: repeated calls cancel/dispose the delegates exactly once. */
    @Test
    public void testCancelOnce() {
        Disposable r = mock(Disposable.class);
        AsyncSubscription as = new AsyncSubscription(r);

        Subscription s = mock(Subscription.class);
        assertTrue(as.setSubscription(s));

        as.cancel();
        as.cancel();
        as.cancel();

        verify(s, never()).request(anyLong());
        verify(s).cancel();
        verify(r).dispose();
    }
}
/*
 * Copyright (c) 2015 Spotify AB.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.spotify.heroic.shell;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.spotify.heroic.HeroicCoreInstance;
import com.spotify.heroic.common.DateRange;
import com.spotify.heroic.common.RangeFilter;
import com.spotify.heroic.dagger.CoreComponent;
import com.spotify.heroic.filter.Filter;
import com.spotify.heroic.filter.FilterFactory;
import com.spotify.heroic.grammar.QueryParser;
import com.spotify.heroic.metric.BackendKeyFilter;
import com.spotify.heroic.shell.task.AnalyticsDumpFetchSeries;
import com.spotify.heroic.shell.task.AnalyticsReportFetchSeries;
import com.spotify.heroic.shell.task.BackendKeyArgument;
import com.spotify.heroic.shell.task.Configure;
import com.spotify.heroic.shell.task.CountData;
import com.spotify.heroic.shell.task.DataMigrate;
import com.spotify.heroic.shell.task.DeleteKeys;
import com.spotify.heroic.shell.task.DeserializeKey;
import com.spotify.heroic.shell.task.Fetch;
import com.spotify.heroic.shell.task.IngestionFilter;
import com.spotify.heroic.shell.task.Keys;
import com.spotify.heroic.shell.task.ListBackends;
import com.spotify.heroic.shell.task.LoadGenerated;
import com.spotify.heroic.shell.task.MetadataCount;
import com.spotify.heroic.shell.task.MetadataDelete;
import com.spotify.heroic.shell.task.MetadataEntries;
import com.spotify.heroic.shell.task.MetadataFetch;
import com.spotify.heroic.shell.task.MetadataLoad;
import com.spotify.heroic.shell.task.MetadataMigrate;
import com.spotify.heroic.shell.task.MetadataTags;
import com.spotify.heroic.shell.task.ParseQuery;
import com.spotify.heroic.shell.task.Pause;
import com.spotify.heroic.shell.task.Query;
import com.spotify.heroic.shell.task.ReadWriteTest;
import com.spotify.heroic.shell.task.Resume;
import com.spotify.heroic.shell.task.SerializeKey;
import com.spotify.heroic.shell.task.Statistics;
import com.spotify.heroic.shell.task.StringifyQuery;
import com.spotify.heroic.shell.task.SuggestKey;
import com.spotify.heroic.shell.task.SuggestPerformance;
import com.spotify.heroic.shell.task.SuggestTag;
import com.spotify.heroic.shell.task.SuggestTagKeyCount;
import com.spotify.heroic.shell.task.SuggestTagValue;
import com.spotify.heroic.shell.task.SuggestTagValues;
import com.spotify.heroic.shell.task.Write;
import com.spotify.heroic.shell.task.WritePerformance;
import lombok.Getter;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.Chronology;
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeParser;
import org.joda.time.format.DateTimeParserBucket;
import org.kohsuke.args4j.Option;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

/**
 * Static registry of every shell task available to the Heroic shell, plus
 * helpers shared by the tasks: filter/key-filter construction, command-line
 * parameter base classes, instant parsing, and time formatting.
 */
public final class Tasks {
    // All registered task definitions, in registration order.
    static final List<ShellTaskDefinition> available = new ArrayList<>();
    // The same definitions, indexed by their implementing class.
    static final Map<Class<?>, ShellTaskDefinition> availableMap = new HashMap<>();

    // Register every known shell task. shellTask() populates both
    // 'available' and 'availableMap'.
    static {
        shellTask(Configure::setup, Configure.class);
        shellTask(Statistics::setup, Statistics.class);
        shellTask(Keys::setup, Keys.class);
        shellTask(DeleteKeys::setup, DeleteKeys.class);
        shellTask(CountData::setup, CountData.class);
        shellTask(SerializeKey::setup, SerializeKey.class);
        shellTask(DeserializeKey::setup, DeserializeKey.class);
        shellTask(ListBackends::setup, ListBackends.class);
        shellTask(Fetch::setup, Fetch.class);
        shellTask(Write::setup, Write.class);
        shellTask(WritePerformance::setup, WritePerformance.class);
        shellTask(MetadataDelete::setup, MetadataDelete.class);
        shellTask(MetadataFetch::setup, MetadataFetch.class);
        shellTask(MetadataTags::setup, MetadataTags.class);
        shellTask(MetadataCount::setup, MetadataCount.class);
        shellTask(MetadataEntries::setup, MetadataEntries.class);
        shellTask(MetadataMigrate::setup, MetadataMigrate.class);
        shellTask(MetadataLoad::setup, MetadataLoad.class);
        shellTask(SuggestTag::setup, SuggestTag.class);
        shellTask(SuggestKey::setup, SuggestKey.class);
        shellTask(SuggestTagValue::setup, SuggestTagValue.class);
        shellTask(SuggestTagValues::setup, SuggestTagValues.class);
        shellTask(SuggestTagKeyCount::setup, SuggestTagKeyCount.class);
        shellTask(SuggestPerformance::setup, SuggestPerformance.class);
        shellTask(Query::setup, Query.class);
        shellTask(ReadWriteTest::setup, ReadWriteTest.class);
        shellTask(Pause::setup, Pause.class);
        shellTask(Resume::setup, Resume.class);
        shellTask(IngestionFilter::setup, IngestionFilter.class);
        shellTask(DataMigrate::setup, DataMigrate.class);
        shellTask(ParseQuery::setup, ParseQuery.class);
        shellTask(StringifyQuery::setup, StringifyQuery.class);
        shellTask(AnalyticsReportFetchSeries::setup, AnalyticsReportFetchSeries.class);
        shellTask(AnalyticsDumpFetchSeries::setup, AnalyticsDumpFetchSeries.class);
        shellTask(LoadGenerated::setup, LoadGenerated.class);
    }

    /** @return all registered task definitions, in registration order. */
    public static List<ShellTaskDefinition> available() {
        return available;
    }

    /** @return all registered task definitions, indexed by task class. */
    public static Map<Class<?>, ShellTaskDefinition> availableMap() {
        return availableMap;
    }

    /**
     * Build a {@link ShellTaskDefinition} for the given task class by reading
     * its {@code @TaskUsage}/{@code @TaskName} annotations, and register it in
     * both {@link #available} and {@link #availableMap}.
     *
     * @param task factory that wires the task instance from a {@link CoreComponent}
     * @param type the task's implementing class (annotation source and map key)
     * @return the registered definition
     */
    static <T extends ShellTask> ShellTaskDefinition shellTask(
        final Function<CoreComponent, T> task, Class<T> type
    ) {
        final String usage = taskUsage(type);
        final String name = name(type);
        final List<String> names = allNames(type);
        final List<String> aliases = aliases(type);

        final ShellTaskDefinition d = new ShellTaskDefinition() {
            @Override
            public String name() {
                return name;
            }

            @Override
            public List<String> names() {
                return names;
            }

            @Override
            public List<String> aliases() {
                return aliases;
            }

            @Override
            public String usage() {
                return usage;
            }

            @Override
            public ShellTask setup(final HeroicCoreInstance core) throws Exception {
                return core.inject(task);
            }
        };

        available.add(d);
        availableMap.put(type, d);
        return d;
    }

    /**
     * Usage string from the task's {@code @TaskUsage} annotation, or a
     * placeholder message if the annotation is missing.
     */
    public static String taskUsage(final Class<? extends ShellTask> task) {
        final TaskUsage u = task.getAnnotation(TaskUsage.class);

        if (u != null) {
            return u.value();
        }

        return String.format("<no @ShellTaskUsage annotation for %s>", task.getCanonicalName());
    }

    /**
     * Primary name from the task's {@code @TaskName} annotation.
     *
     * @throws IllegalStateException if the annotation is missing
     */
    public static String name(final Class<? extends ShellTask> task) {
        final TaskName n = task.getAnnotation(TaskName.class);

        if (n != null) {
            return n.value();
        }

        throw new IllegalStateException(
            String.format("No name configured with @TaskName on %s", task.getCanonicalName()));
    }

    /**
     * Primary name followed by all aliases from {@code @TaskName}.
     *
     * @throws IllegalStateException if no name is configured
     */
    public static List<String> allNames(final Class<? extends ShellTask> task) {
        final TaskName n = task.getAnnotation(TaskName.class);
        final List<String> names = new ArrayList<>();

        if (n != null) {
            names.add(n.value());

            for (final String alias : n.aliases()) {
                names.add(alias);
            }
        }

        if (names.isEmpty()) {
            throw new IllegalStateException(
                String.format("No name configured with @TaskName on %s", task.getCanonicalName()));
        }

        return names;
    }

    /** Aliases only (no primary name); empty if the annotation is missing. */
    public static List<String> aliases(final Class<? extends ShellTask> task) {
        final TaskName n = task.getAnnotation(TaskName.class);
        final List<String> names = new ArrayList<>();

        if (n != null) {
            for (final String alias : n.aliases()) {
                names.add(alias);
            }
        }

        return names;
    }

    /**
     * Parse the task's query parameters into a {@link Filter}.
     * An empty query yields {@code filters.t()} — presumably the match-all
     * ("true") filter; confirm against FilterFactory.
     */
    public static Filter setupFilter(
        FilterFactory filters, QueryParser parser, TaskQueryParameters params
    ) {
        final List<String> query = params.getQuery();

        if (query.isEmpty()) {
            return filters.t();
        }

        return parser.parseFilter(StringUtils.join(query, " "));
    }

    /**
     * Translate the {@link KeyspaceBase} command-line options into a
     * {@link BackendKeyFilter}. Only options that were actually set
     * (non-null, or {@code >= 0} for the numeric ones) are applied.
     *
     * @throws Exception if a JSON key argument cannot be deserialized
     */
    public static BackendKeyFilter setupKeyFilter(KeyspaceBase params, ObjectMapper mapper)
        throws Exception {
        BackendKeyFilter filter = BackendKeyFilter.of();

        if (params.start != null) {
            filter = filter.withStart(BackendKeyFilter.gte(
                mapper.readValue(params.start, BackendKeyArgument.class).toBackendKey()));
        }

        if (params.startPercentage >= 0) {
            // Percentages are given as 0-100 on the command line.
            filter = filter.withStart(
                BackendKeyFilter.gtePercentage((float) params.startPercentage / 100f));
        }

        if (params.startToken != null) {
            filter = filter.withStart(BackendKeyFilter.gteToken(params.startToken));
        }

        if (params.end != null) {
            filter = filter.withEnd(BackendKeyFilter.lt(
                mapper.readValue(params.end, BackendKeyArgument.class).toBackendKey()));
        }

        if (params.endPercentage >= 0) {
            filter = filter.withEnd(BackendKeyFilter.ltPercentage((float) params.endPercentage / 100f));
        }

        if (params.endToken != null) {
            filter = filter.withEnd(BackendKeyFilter.ltToken(params.endToken));
        }

        if (params.limit >= 0) {
            filter = filter.withLimit(params.limit);
        }

        return filter;
    }

    /**
     * Base class for task parameters that carry a query range.
     * The default range is the 7 days leading up to instantiation time.
     */
    public abstract static class QueryParamsBase extends AbstractShellTaskParams
        implements TaskQueryParameters {
        private final DateRange defaultDateRange;

        public QueryParamsBase() {
            final long now = System.currentTimeMillis();
            final long start = now - TimeUnit.MILLISECONDS.convert(7, TimeUnit.DAYS);
            this.defaultDateRange = new DateRange(start, now);
        }

        @Override
        public DateRange getRange() {
            return defaultDateRange;
        }
    }

    /**
     * Base class for tasks operating on a range of backend keys.
     * -1 / null mean "option not set" (see {@link #setupKeyFilter}).
     */
    public abstract static class KeyspaceBase extends QueryParamsBase {
        @Option(name = "--start", usage = "First key to operate on", metaVar = "<json>")
        protected String start;

        @Option(name = "--end", usage = "Last key to operate on (exclusive)", metaVar = "<json>")
        protected String end;

        @Option(name = "--start-percentage", usage = "First key to operate on in percentage",
            metaVar = "<int>")
        protected int startPercentage = -1;

        @Option(name = "--end-percentage", usage = "Last key to operate on (exclusive) in percentage",
            metaVar = "<int>")
        protected int endPercentage = -1;

        @Option(name = "--start-token", usage = "First token to operate on", metaVar = "<long>")
        protected Long startToken = null;

        @Option(name = "--end-token", usage = "Last token to operate on (exclusive)",
            metaVar = "<int>")
        protected Long endToken = null;

        @Option(name = "--limit", usage = "Limit the number keys to operate on", metaVar = "<int>")
        @Getter
        protected int limit = -1;
    }

    /**
     * Combine the parsed filter with the task's date range and limit into a
     * {@link RangeFilter}.
     */
    public static RangeFilter setupRangeFilter(
        FilterFactory filters, QueryParser parser, TaskQueryParameters params
    ) {
        final Filter filter = setupFilter(filters, parser, params);
        return new RangeFilter(filter, params.getRange(), params.getLimit());
    }

    // Time-of-day formats (interpreted relative to "today", see parseTodayInstant).
    private static final List<DateTimeParser> today = new ArrayList<>();
    // Full date/time formats.
    private static final List<DateTimeParser> full = new ArrayList<>();

    static {
        today.add(DateTimeFormat.forPattern("HH:mm").getParser());
        today.add(DateTimeFormat.forPattern("HH:mm:ss").getParser());
        today.add(DateTimeFormat.forPattern("HH:mm:ss.SSS").getParser());

        full.add(DateTimeFormat.forPattern("yyyy-MM-dd/HH:mm").getParser());
        full.add(DateTimeFormat.forPattern("yyyy-MM-dd/HH:mm:ss").getParser());
        full.add(DateTimeFormat.forPattern("yyyy-MM-dd/HH:mm:ss.SSS").getParser());
    }

    /**
     * Parse an instant expression into epoch milliseconds (UTC).
     *
     * Accepted forms: {@code +N}/{@code -N} (offset in ms relative to
     * {@code now}), a plain number (absolute ms), {@code yyyy-MM-dd/HH:mm[:ss[.SSS]]}
     * (full date), or {@code HH:mm[:ss[.SSS]]} (time-of-day on the day of {@code now}).
     *
     * @throws IllegalArgumentException if no format matches
     */
    public static long parseInstant(String input, long now) {
        if (input.charAt(0) == '+') {
            return now + Long.parseLong(input.substring(1));
        }

        if (input.charAt(0) == '-') {
            return now - Long.parseLong(input.substring(1));
        }

        // try to parse just milliseconds
        try {
            return Long.parseLong(input);
        } catch (IllegalArgumentException e) {
            // pass-through
        }

        final Chronology chrono = ISOChronology.getInstanceUTC();

        // A '/' distinguishes the full date form from the time-of-day form.
        if (input.indexOf('/') >= 0) {
            return parseFullInstant(input, chrono);
        }

        return parseTodayInstant(input, chrono, now);
    }

    /**
     * Try each time-of-day pattern, pinning year/month/day to the date of
     * {@code now}, and return the first successful parse.
     */
    private static long parseTodayInstant(String input, final Chronology chrono, long now) {
        final DateTime n = new DateTime(now, chrono);

        for (final DateTimeParser p : today) {
            // 2000 is the two-digit-year pivot (unused by these patterns).
            final DateTimeParserBucket bucket =
                new DateTimeParserBucket(0, chrono, null, null, 2000);

            bucket.saveField(chrono.year(), n.getYear());
            bucket.saveField(chrono.monthOfYear(), n.getMonthOfYear());
            bucket.saveField(chrono.dayOfYear(), n.getDayOfYear());

            try {
                p.parseInto(bucket, input, 0);
            } catch (IllegalArgumentException e) {
                // pass-through
                continue;
            }

            return bucket.computeMillis();
        }

        throw new IllegalArgumentException(input + " is not a valid instant");
    }

    /** Try each full date/time pattern and return the first successful parse. */
    private static long parseFullInstant(String input, final Chronology chrono) {
        for (final DateTimeParser p : full) {
            final DateTimeParserBucket bucket =
                new DateTimeParserBucket(0, chrono, null, null, 2000);

            try {
                p.parseInto(bucket, input, 0);
            } catch (IllegalArgumentException e) {
                // pass-through
                continue;
            }

            return bucket.computeMillis();
        }

        throw new IllegalArgumentException(input + " is not a valid instant");
    }

    /**
     * Format a nanosecond duration for humans, picking the largest unit
     * (ns, us, ms, s) that keeps the value below 1000.
     */
    public static String formatTimeNanos(long diff) {
        if (diff < 1000) {
            return String.format("%d ns", diff);
        }

        if (diff < 1000000) {
            final double v = ((double) diff) / 1000;
            return String.format("%.3f us", v);
        }

        if (diff < 1000000000) {
            final double v = ((double) diff) / 1000000;
            return String.format("%.3f ms", v);
        }

        final double v = ((double) diff) / 1000000000;
        return String.format("%.3f s", v);
    }
}
/****************************************************************************** * * * Copyright (c) 1999-2003 Wimba S.A., All Rights Reserved. * * * * COPYRIGHT: * * This software is the property of Wimba S.A. * * This software is redistributed under the Xiph.org variant of * * the BSD license. * * Redistribution and use in source and binary forms, with or without * * modification, are permitted provided that the following conditions * * are met: * * - Redistributions of source code must retain the above copyright * * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * * notice, this list of conditions and the following disclaimer in the * * documentation and/or other materials provided with the distribution. * * - Neither the name of Wimba, the Xiph.org Foundation nor the names of * * its contributors may be used to endorse or promote products derived * * from this software without specific prior written permission. * * * * WARRANTIES: * * This software is made available by the authors in the hope * * that it will be useful, but without any warranty. * * Wimba S.A. is not liable for any consequence related to the * * use of the provided software. * * * * Class: NbEncoder.java * * * * Author: Marc GIMPEL * * Based on code by: Jean-Marc VALIN * * * * Date: 9th April 2003 * * * ******************************************************************************/ /* $Id: NbEncoder.java,v 1.2 2004/10/21 16:21:57 mgimpel Exp $ */ /* Copyright (C) 2002 Jean-Marc Valin Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the Xiph.org Foundation nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package speex; /** * Narrowband Speex Encoder * * @author Marc Gimpel, Wimba S.A. 
 * (mgimpel@horizonwimba.com)
 * @version $Revision: 1.2 $
 */
public class NbEncoder extends NbCodec implements Encoder {
    /** The Narrowband Quality map indicates which narrowband submode to use for the given narrowband quality setting. */
    public static final int[] NB_QUALITY_MAP = {1, 8, 2, 3, 3, 4, 4, 5, 5, 6, 7};

    private int bounded_pitch;      /** Next frame should not rely on previous frames for pitch */
    private int[] pitch;            /** Pitch value chosen for each sub-frame (filled during encode) */
    private float pre_mem2;         /** 1-element memory for pre-emphasis */
    private float[] exc2Buf;        /** "Pitch enhanced" excitation */
    private int exc2Idx;            /** "Pitch enhanced" excitation */
    private float[] swBuf;          /** Weighted signal buffer */
    private int swIdx;              /** Start of weighted signal frame */
    private float[] window;         /** Temporary (Hanning) window */
    private float[] buf2;           /** 2nd temporary buffer */
    private float[] autocorr;       /** auto-correlation */
    private float[] lagWindow;      /** Window applied to auto-correlation */
    private float[] lsp;            /** LSPs for current frame */
    private float[] old_lsp;        /** LSPs for previous frame */
    private float[] interp_lsp;     /** Interpolated LSPs */
    private float[] interp_lpc;     /** Interpolated LPCs */
    private float[] bw_lpc1;        /** LPCs after bandwidth expansion by gamma1 for perceptual weighting */
    private float[] bw_lpc2;        /** LPCs after bandwidth expansion by gamma2 for perceptual weighting */
    private float[] rc;             /** Reflection coefficients */
    private float[] mem_sw;         /** Filter memory for perceptually-weighted signal */
    private float[] mem_sw_whole;   /** Filter memory for perceptually-weighted signal (whole frame) */
    private float[] mem_exc;        /** Filter memory for excitation (whole frame) */
    private Vbr vbr;                /** State of the VBR data */
    private int dtx_count;          /** Number of consecutive DTX frames */
    private float[] innov2;         /** Secondary innovation buffer (sized 40 in init) */

    protected int complexity;       /** Complexity setting (0-10 from least complex to most complex) */
    protected int vbr_enabled;      /** 1 for enabling VBR, 0 otherwise */
    protected int vad_enabled;      /** 1 for enabling VAD, 0 otherwise */
    protected int abr_enabled;      /** ABR setting (in bps), 0 if off */
    protected float vbr_quality;    /** Quality setting for VBR encoding */
    protected float relative_quality; /** Relative quality that will be needed by VBR */
    protected float abr_drift;
    protected float abr_drift2;
    protected float abr_count;
    protected int sampling_rate;
    protected int submodeSelect;    /** Mode chosen by the user (may differ from submodeID if VAD is on) */

    /**
     * Initialisation: sets encoder defaults and allocates all working buffers.
     * @param frameSize
     * @param subframeSize
     * @param lpcSize
     * @param bufSize
     */
    public void init(final int frameSize, final int subframeSize, final int lpcSize,
                     final int bufSize) {
        super.init(frameSize, subframeSize, lpcSize, bufSize);
        complexity = 3; // in C it's 2 here, but set to 3 automatically by the encoder
        vbr_enabled = 0; // disabled by default
        vad_enabled = 0; // disabled by default
        abr_enabled = 0; // disabled by default
        vbr_quality = 8;

        submodeSelect = 5;
        pre_mem2 = 0;
        bounded_pitch = 1;

        exc2Buf = new float[bufSize];
        exc2Idx = bufSize - windowSize;
        swBuf = new float[bufSize];
        swIdx = bufSize - windowSize;

        window = Misc.window(windowSize, subframeSize);
        lagWindow = Misc.lagWindow(lpcSize, lag_factor);

        autocorr = new float[lpcSize+1];
        buf2 = new float[windowSize];

        interp_lpc = new float[lpcSize+1];
        interp_qlpc = new float[lpcSize+1];
        bw_lpc1 = new float[lpcSize+1];
        bw_lpc2 = new float[lpcSize+1];
        lsp = new float[lpcSize];
        qlsp = new float[lpcSize];
        old_lsp = new float[lpcSize];
        old_qlsp = new float[lpcSize];
        interp_lsp = new float[lpcSize];
        interp_qlsp = new float[lpcSize];
        rc = new float[lpcSize];

        mem_sp = new float[lpcSize]; // why was there a *5 before ?!?
        mem_sw = new float[lpcSize];
        mem_sw_whole = new float[lpcSize];
        mem_exc = new float[lpcSize];

        vbr = new Vbr();
        dtx_count = 0;
        abr_count = 0;
        sampling_rate = 8000;

        awk1 = new float[lpcSize+1];
        awk2 = new float[lpcSize+1];
        awk3 = new float[lpcSize+1];
        innov2 = new float[40];

        filters.init ();
        pitch = new int[nbSubframes];
    }

    /**
     * Encode the given input signal.
     * @param bits - Speex bits buffer.
     * @param in - the raw mono audio frame to encode.
     * @return return 1 if successful.
     */
    public int encode(final Bits bits, final float[] in) {
        int i;
        float[] res, target, mem;
        float[] syn_resp;
        float[] orig;

        /* Copy new data in input buffer */
        System.arraycopy(frmBuf, frameSize, frmBuf, 0, bufSize-frameSize);
        // Pre-emphasis: first sample uses the memory carried over from the previous frame.
        frmBuf[bufSize-frameSize] = in[0] - preemph*pre_mem;
        for (i=1; i<frameSize; i++)
            frmBuf[bufSize-frameSize+i] = in[i] - preemph*in[i-1];
        pre_mem = in[frameSize-1];

        /* Move signals 1 frame towards the past */
        System.arraycopy(exc2Buf, frameSize, exc2Buf, 0, bufSize-frameSize);
        System.arraycopy(excBuf, frameSize, excBuf, 0, bufSize-frameSize);
        System.arraycopy(swBuf, frameSize, swBuf, 0, bufSize-frameSize);

        /* Window for analysis */
        for (i=0; i<windowSize; i++)
            buf2[i] = frmBuf[i+frmIdx] * window[i];

        /* Compute auto-correlation */
        Lpc.autocorr(buf2, autocorr, lpcSize+1, windowSize);

        autocorr[0] += 10;         /* prevents NANs */
        autocorr[0] *= lpc_floor;  /* Noise floor in auto-correlation domain */

        /* Lag windowing: equivalent to filtering in the power-spectrum domain */
        for (i=0; i<lpcSize+1; i++)
            autocorr[i] *= lagWindow[i];

        /* Levinson-Durbin */
        Lpc.wld(lpc, autocorr, rc, lpcSize); // tmperr
        System.arraycopy(lpc, 0, lpc, 1, lpcSize);
        lpc[0]=1;

        /* LPC to LSPs (x-domain) transform */
        int roots=Lsp.lpc2lsp (lpc, lpcSize, lsp, 15, 0.2f);
        /* Check if we found all the roots */
        if (roots==lpcSize) {
            /* LSP x-domain to angle domain*/
            for (i=0;i<lpcSize;i++)
                lsp[i] = (float)Math.acos(lsp[i]);
        } else {
            /* Search again if we can afford it */
            if (complexity>1)
                roots = Lsp.lpc2lsp (lpc,
lpcSize, lsp, 11, 0.05f); if (roots==lpcSize) { /* LSP x-domain to angle domain*/ for (i=0;i<lpcSize;i++) lsp[i] = (float)Math.acos(lsp[i]); } else { /*If we can't find all LSP's, do some damage control and use previous filter*/ for (i=0;i<lpcSize;i++) { lsp[i]=old_lsp[i]; } } } float lsp_dist=0; for (i=0;i<lpcSize;i++) lsp_dist += (old_lsp[i] - lsp[i])*(old_lsp[i] - lsp[i]); /* Whole frame analysis (open-loop estimation of pitch and excitation gain) */ float ol_gain; int ol_pitch; float ol_pitch_coef; { if (first != 0) for (i=0; i<lpcSize;i++) interp_lsp[i] = lsp[i]; else for (i=0;i<lpcSize;i++) interp_lsp[i] = .375f*old_lsp[i] + .625f*lsp[i]; Lsp.enforce_margin(interp_lsp, lpcSize, .002f); /* Compute interpolated LPCs (unquantized) for whole frame*/ for (i=0; i<lpcSize; i++) interp_lsp[i] = (float)Math.cos(interp_lsp[i]); m_lsp.lsp2lpc(interp_lsp, interp_lpc, lpcSize); /*Open-loop pitch*/ if (submodes[submodeID] == null || vbr_enabled != 0 || vad_enabled != 0 || submodes[submodeID].forced_pitch_gain != 0 || submodes[submodeID].lbr_pitch != -1) { int[] nol_pitch = new int[6]; float[] nol_pitch_coef = new float[6]; Filters.bw_lpc(gamma1, interp_lpc, bw_lpc1, lpcSize); Filters.bw_lpc(gamma2, interp_lpc, bw_lpc2, lpcSize); Filters.filter_mem2(frmBuf, frmIdx, bw_lpc1, bw_lpc2, swBuf, swIdx, frameSize, lpcSize, mem_sw_whole, 0); Ltp.open_loop_nbest_pitch(swBuf, swIdx, min_pitch, max_pitch, frameSize, nol_pitch, nol_pitch_coef, 6); ol_pitch=nol_pitch[0]; ol_pitch_coef = nol_pitch_coef[0]; /*Try to remove pitch multiples*/ for (i=1;i<6;i++) { if ((nol_pitch_coef[i]>.85*ol_pitch_coef) && (Math.abs(nol_pitch[i]-ol_pitch/2.0)<=1 || Math.abs(nol_pitch[i]-ol_pitch/3.0)<=1 || Math.abs(nol_pitch[i]-ol_pitch/4.0)<=1 || Math.abs(nol_pitch[i]-ol_pitch/5.0)<=1)) { /*ol_pitch_coef=nol_pitch_coef[i];*/ ol_pitch = nol_pitch[i]; } } /*if (ol_pitch>50) ol_pitch/=2;*/ /*ol_pitch_coef = sqrt(ol_pitch_coef);*/ } else { ol_pitch=0; ol_pitch_coef=0; } /*Compute "real" excitation*/ 
Filters.fir_mem2(frmBuf, frmIdx, interp_lpc, excBuf, excIdx, frameSize, lpcSize, mem_exc); /* Compute open-loop excitation gain */ ol_gain=0; for (i=0;i<frameSize;i++) ol_gain += excBuf[excIdx+i]*excBuf[excIdx+i]; ol_gain=(float)Math.sqrt(1+ol_gain/frameSize); } /*VBR stuff*/ if (vbr != null && (vbr_enabled != 0 || vad_enabled != 0)) { if (abr_enabled != 0) { float qual_change=0; if (abr_drift2 * abr_drift > 0) { /* Only adapt if long-term and short-term drift are the same sign */ qual_change = -.00001f*abr_drift/(1+abr_count); if (qual_change>.05f) qual_change=.05f; if (qual_change<-.05f) qual_change=-.05f; } vbr_quality += qual_change; if (vbr_quality>10) vbr_quality=10; if (vbr_quality<0) vbr_quality=0; } relative_quality = vbr.analysis(in, frameSize, ol_pitch, ol_pitch_coef); /*if (delta_qual<0)*/ /* delta_qual*=.1*(3+st->vbr_quality);*/ if (vbr_enabled != 0) { int mode; int choice=0; float min_diff=100; mode = 8; while (mode > 0) { int v1; float thresh; v1=(int)Math.floor(vbr_quality); if (v1==10) thresh = Vbr.nb_thresh[mode][v1]; else thresh = (vbr_quality-v1)*Vbr.nb_thresh[mode][v1+1] + (1+v1-vbr_quality)*Vbr.nb_thresh[mode][v1]; if (relative_quality > thresh && relative_quality-thresh<min_diff) { choice = mode; min_diff = relative_quality-thresh; } mode--; } mode=choice; if (mode==0) { if (dtx_count==0 || lsp_dist>.05 || dtx_enabled==0 || dtx_count>20) { mode=1; dtx_count=1; } else { mode=0; dtx_count++; } } else { dtx_count=0; } setMode(mode); if (abr_enabled != 0) { int bitrate; bitrate = getBitRate(); abr_drift+=(bitrate-abr_enabled); abr_drift2 = .95f*abr_drift2 + .05f*(bitrate-abr_enabled); abr_count += 1.0; } } else { /*VAD only case*/ int mode; if (relative_quality<2) { if (dtx_count==0 || lsp_dist>.05 || dtx_enabled == 0 || dtx_count>20) { dtx_count=1; mode=1; } else { mode=0; dtx_count++; } } else { dtx_count = 0; mode=submodeSelect; } /*speex_encoder_ctl(state, SPEEX_SET_MODE, &mode);*/ submodeID=mode; } } else { relative_quality = -1; } /* First, 
       transmit a zero for narrowband */
        bits.pack(0, 1);
        /* Transmit the sub-mode we use for this frame */
        bits.pack(submodeID, NB_SUBMODE_BITS);

        /* If null mode (no transmission), just set a couple things to zero*/
        if (submodes[submodeID] == null) {
            for (i=0;i<frameSize;i++)
                excBuf[excIdx+i]=exc2Buf[exc2Idx+i]=swBuf[swIdx+i]=VERY_SMALL;

            for (i=0;i<lpcSize;i++)
                mem_sw[i]=0;
            first=1;
            bounded_pitch = 1;

            /* Final signal synthesis from excitation */
            Filters.iir_mem2(excBuf, excIdx, interp_qlpc, frmBuf, frmIdx, frameSize, lpcSize, mem_sp);

            // De-emphasis on output: inverse of the pre-emphasis applied on input.
            in[0] = frmBuf[frmIdx] + preemph*pre_mem2;
            // NOTE(review): the index expression "frmBuf[frmIdx=i]" ASSIGNS i to frmIdx
            // rather than adding ("frmIdx+i"), clobbering the frame index for later use.
            // The parallel accesses above and below this loop all use frmIdx or frmIdx+i —
            // this looks like a typo; confirm against the reference Speex encoder before changing.
            for (i=1;i<frameSize;i++)
                in[i]=frmBuf[frmIdx=i] + preemph*in[i-1];
            pre_mem2=in[frameSize-1];
            return 0;
        }

        /* LSP Quantization */
        if (first != 0) {
            for (i=0; i<lpcSize;i++)
                old_lsp[i] = lsp[i];
        }

        /*Quantize LSPs*/
//#if 1 /*0 for unquantized*/
        submodes[submodeID].lsqQuant.quant(lsp, qlsp, lpcSize, bits);
//#else
//        for (i=0;i<lpcSize;i++)
//            qlsp[i]=lsp[i];
//#endif

        /*If we use low bit-rate pitch mode, transmit open-loop pitch*/
        if (submodes[submodeID].lbr_pitch!=-1) {
            bits.pack(ol_pitch-min_pitch, 7);
        }

        if (submodes[submodeID].forced_pitch_gain != 0) {
            int quant;
            // Quantize the open-loop pitch coefficient to 4 bits (0..15).
            quant = (int)Math.floor(.5+15*ol_pitch_coef);
            if (quant>15)
                quant=15;
            if (quant<0)
                quant=0;
            bits.pack(quant, 4);
            ol_pitch_coef=(float) 0.066667*quant;
        }

        /*Quantize and transmit open-loop excitation gain*/
        {
            // Log-domain gain quantized to 5 bits (0..31).
            int qe = (int)(Math.floor(0.5+3.5*Math.log(ol_gain)));
            if (qe<0)
                qe=0;
            if (qe>31)
                qe=31;
            ol_gain = (float) Math.exp(qe/3.5);
            bits.pack(qe, 5);
        }

        /* Special case for first frame */
        if (first != 0) {
            for (i=0;i<lpcSize;i++)
                old_qlsp[i] = qlsp[i];
        }

        /* Filter response */
        res = new float[subframeSize];
        /* Target signal */
        target = new float[subframeSize];
        syn_resp = new float[subframeSize];
        mem = new float[lpcSize];
        orig = new float[frameSize];
        for (i=0;i<frameSize;i++)
            orig[i]=frmBuf[frmIdx+i];

        /* Loop on sub-frames */
        for (int sub=0;sub<nbSubframes;sub++) {
            float tmp;
            int offset;
            int sp, sw, exc, exc2;
            int pitchval;

            /* Offset relative to
start of frame */ offset = subframeSize*sub; /* Original signal */ sp=frmIdx+offset; /* Excitation */ exc=excIdx+offset; /* Weighted signal */ sw=swIdx+offset; exc2=exc2Idx+offset; /* LSP interpolation (quantized and unquantized) */ tmp = (float) (1.0 + sub)/nbSubframes; for (i=0;i<lpcSize;i++) interp_lsp[i] = (1-tmp)*old_lsp[i] + tmp*lsp[i]; for (i=0;i<lpcSize;i++) interp_qlsp[i] = (1-tmp)*old_qlsp[i] + tmp*qlsp[i]; /* Make sure the filters are stable */ Lsp.enforce_margin(interp_lsp, lpcSize, .002f); Lsp.enforce_margin(interp_qlsp, lpcSize, .002f); /* Compute interpolated LPCs (quantized and unquantized) */ for (i=0;i<lpcSize;i++) interp_lsp[i] = (float) Math.cos(interp_lsp[i]); m_lsp.lsp2lpc(interp_lsp, interp_lpc, lpcSize); for (i=0;i<lpcSize;i++) interp_qlsp[i] = (float) Math.cos(interp_qlsp[i]); m_lsp.lsp2lpc(interp_qlsp, interp_qlpc, lpcSize); /* Compute analysis filter gain at w=pi (for use in SB-CELP) */ tmp=1; pi_gain[sub]=0; for (i=0;i<=lpcSize;i++) { pi_gain[sub] += tmp*interp_qlpc[i]; tmp = -tmp; } /* Compute bandwidth-expanded (unquantized) LPCs for perceptual weighting */ Filters.bw_lpc(gamma1, interp_lpc, bw_lpc1, lpcSize); if (gamma2>=0) Filters.bw_lpc(gamma2, interp_lpc, bw_lpc2, lpcSize); else { bw_lpc2[0]=1; bw_lpc2[1]=-preemph; for (i=2;i<=lpcSize;i++) bw_lpc2[i]=0; } /* Compute impulse response of A(z/g1) / ( A(z)*A(z/g2) )*/ for (i=0;i<subframeSize;i++) excBuf[exc+i]=0; excBuf[exc]=1; Filters.syn_percep_zero(excBuf, exc, interp_qlpc, bw_lpc1, bw_lpc2, syn_resp, subframeSize, lpcSize); /* Reset excitation */ for (i=0;i<subframeSize;i++) excBuf[exc+i]=0; for (i=0;i<subframeSize;i++) exc2Buf[exc2+i]=0; /* Compute zero response of A(z/g1) / ( A(z/g2) * A(z) ) */ for (i=0;i<lpcSize;i++) mem[i]=mem_sp[i]; Filters.iir_mem2(excBuf, exc, interp_qlpc, excBuf, exc, subframeSize, lpcSize, mem); for (i=0;i<lpcSize;i++) mem[i]=mem_sw[i]; Filters.filter_mem2(excBuf, exc, bw_lpc1, bw_lpc2, res, 0, subframeSize, lpcSize, mem, 0); /* Compute weighted signal */ 
for (i=0;i<lpcSize;i++) mem[i]=mem_sw[i]; Filters.filter_mem2(frmBuf, sp, bw_lpc1, bw_lpc2, swBuf, sw, subframeSize, lpcSize, mem, 0); /* Compute target signal */ for (i=0;i<subframeSize;i++) target[i]=swBuf[sw+i]-res[i]; for (i=0;i<subframeSize;i++) excBuf[exc+i]=exc2Buf[exc2+i]=0; /* If we have a long-term predictor (otherwise, something's wrong) */ // if (submodes[submodeID].ltp.quant) // { int pit_min, pit_max; /* Long-term prediction */ if (submodes[submodeID].lbr_pitch != -1) { /* Low bit-rate pitch handling */ int margin; margin = submodes[submodeID].lbr_pitch; if (margin != 0) { if (ol_pitch < min_pitch+margin-1) ol_pitch=min_pitch+margin-1; if (ol_pitch > max_pitch-margin) ol_pitch=max_pitch-margin; pit_min = ol_pitch-margin+1; pit_max = ol_pitch+margin; } else { pit_min=pit_max=ol_pitch; } } else { pit_min = min_pitch; pit_max = max_pitch; } /* Force pitch to use only the current frame if needed */ if (bounded_pitch != 0 && pit_max>offset) pit_max=offset; /* Perform pitch search */ pitchval = submodes[submodeID].ltp.quant(target, swBuf, sw, interp_qlpc, bw_lpc1, bw_lpc2, excBuf, exc, pit_min, pit_max, ol_pitch_coef, lpcSize, subframeSize, bits, exc2Buf, exc2, syn_resp, complexity); pitch[sub]=pitchval; // } else { // speex_error ("No pitch prediction, what's wrong"); // } /* Update target for adaptive codebook contribution */ Filters.syn_percep_zero(excBuf, exc, interp_qlpc, bw_lpc1, bw_lpc2, res, subframeSize, lpcSize); for (i=0;i<subframeSize;i++) target[i]-=res[i]; /* Quantization of innovation */ { int innovptr; float ener=0, ener_1; innovptr = sub*subframeSize; for (i=0;i<subframeSize;i++) innov[innovptr+i]=0; Filters.residue_percep_zero(target, 0, interp_qlpc, bw_lpc1, bw_lpc2, buf2, subframeSize, lpcSize); for (i=0;i<subframeSize;i++) ener+=buf2[i]*buf2[i]; ener=(float)Math.sqrt(.1f+ener/subframeSize); /*for (i=0;i<subframeSize;i++) System.out.print(buf2[i]/ener + "\t"); */ ener /= ol_gain; /* Calculate gain correction for the sub-frame (if any) */ 
if (submodes[submodeID].have_subframe_gain != 0) { int qe; ener=(float)Math.log(ener); if (submodes[submodeID].have_subframe_gain==3) { qe = VQ.index(ener, exc_gain_quant_scal3, 8); bits.pack(qe, 3); ener=exc_gain_quant_scal3[qe]; } else { qe = VQ.index(ener, exc_gain_quant_scal1, 2); bits.pack(qe, 1); ener=exc_gain_quant_scal1[qe]; } ener=(float)Math.exp(ener); } else { ener=1; } ener*=ol_gain; /*System.out.println(ener + " " + ol_gain);*/ ener_1 = 1/ener; /* Normalize innovation */ for (i=0;i<subframeSize;i++) target[i]*=ener_1; /* Quantize innovation */ // if (submodes[submodeID].innovation != null) // { /* Codebook search */ submodes[submodeID].innovation.quant(target, interp_qlpc, bw_lpc1, bw_lpc2, lpcSize, subframeSize, innov, innovptr, syn_resp, bits, complexity); /* De-normalize innovation and update excitation */ for (i=0;i<subframeSize;i++) innov[innovptr+i]*=ener; for (i=0;i<subframeSize;i++) excBuf[exc+i] += innov[innovptr+i]; // } else { // speex_error("No fixed codebook"); // } /* In some (rare) modes, we do a second search (more bits) to reduce noise even more */ if (submodes[submodeID].double_codebook != 0) { float[] innov2 = new float[subframeSize]; // for (i=0;i<subframeSize;i++) // innov2[i]=0; for (i=0;i<subframeSize;i++) target[i]*=2.2; submodes[submodeID].innovation.quant(target, interp_qlpc, bw_lpc1, bw_lpc2, lpcSize, subframeSize, innov2, 0, syn_resp, bits, complexity); for (i=0;i<subframeSize;i++) innov2[i]*=ener*(1/2.2); for (i=0;i<subframeSize;i++) excBuf[exc+i] += innov2[i]; } for (i=0;i<subframeSize;i++) target[i]*=ener; } /*Keep the previous memory*/ for (i=0;i<lpcSize;i++) mem[i]=mem_sp[i]; /* Final signal synthesis from excitation */ Filters.iir_mem2(excBuf, exc, interp_qlpc, frmBuf, sp, subframeSize, lpcSize, mem_sp); /* Compute weighted signal again, from synthesized speech (not sure it's the right thing) */ Filters.filter_mem2(frmBuf, sp, bw_lpc1, bw_lpc2, swBuf, sw, subframeSize, lpcSize, mem_sw, 0); for (i=0;i<subframeSize;i++) 
exc2Buf[exc2+i]=excBuf[exc+i]; } /* Store the LSPs for interpolation in the next frame */ if (submodeID>=1) { for (i=0;i<lpcSize;i++) old_lsp[i] = lsp[i]; for (i=0;i<lpcSize;i++) old_qlsp[i] = qlsp[i]; } if (submodeID==1) { if (dtx_count != 0) { bits.pack(15, 4); } else { bits.pack(0, 4); } } /* The next frame will not be the first (Duh!) */ first = 0; { float ener=0, err=0; float snr; for (i=0;i<frameSize;i++) { ener+=frmBuf[frmIdx+i]*frmBuf[frmIdx+i]; err += (frmBuf[frmIdx+i]-orig[i])*(frmBuf[frmIdx+i]-orig[i]); } snr = (float) (10*Math.log((ener+1)/(err+1))); /*System.out.println("Frame result: SNR="+snr+" E="+ener+" Err="+err+"\r\n");*/ } /* Replace input by synthesized speech */ in[0] = frmBuf[frmIdx] + preemph*pre_mem2; for (i=1;i<frameSize;i++) in[i]=frmBuf[frmIdx+i] + preemph*in[i-1]; pre_mem2=in[frameSize-1]; if (submodes[submodeID].innovation instanceof NoiseSearch || submodeID==0) bounded_pitch = 1; else bounded_pitch = 0; return 1; } /** * Returns the size in bits of an audio frame encoded with the current mode. * @return the size in bits of an audio frame encoded with the current mode. */ public int getEncodedFrameSize() { return NB_FRAME_SIZE[submodeID]; } //--------------------------------------------------------------------------- // Speex Control Functions //--------------------------------------------------------------------------- /** * Sets the Quality * @param quality */ public void setQuality(int quality) { if (quality < 0) { quality = 0; } if (quality > 10) { quality = 10; } submodeID = submodeSelect = NB_QUALITY_MAP[quality]; } /** * Gets the bitrate. * @return the bitrate. 
*/
  public int getBitRate() {
    // Bit-rate of the active submode; when the submode table entry is null,
    // fall back to NB_SUBMODE_BITS+1 bits per frame.
    if (submodes[submodeID] != null)
      return sampling_rate * submodes[submodeID].bits_per_frame / frameSize;
    else
      return sampling_rate * (NB_SUBMODE_BITS + 1) / frameSize;
  }

  //---------------------------------------------------------------------------
  // Speex Control Functions (continued)
  //---------------------------------------------------------------------------

  /**
   * Sets the encoding submode.
   * @param mode the submode identifier; negative values are clamped to 0.
   */
  public void setMode(int mode) {
    if (mode < 0) {
      mode = 0;
    }
    submodeID = submodeSelect = mode;
  }

  /**
   * Returns the encoding submode currently in use.
   * @return the encoding submode currently in use.
   */
  public int getMode() {
    return submodeID;
  }

  /**
   * Sets the bitrate by picking the highest quality whose bit-rate does not
   * exceed the requested value. If even quality 0 exceeds it, quality 0 is kept.
   * @param bitrate the target bit-rate in bits per second.
   */
  public void setBitRate(final int bitrate) {
    for (int i = 10; i >= 0; i--) {
      setQuality(i);
      if (getBitRate() <= bitrate)
        return;
    }
  }

  /**
   * Sets whether or not to use Variable Bit Rate encoding.
   * @param vbr true to enable VBR.
   */
  public void setVbr(final boolean vbr) {
    vbr_enabled = vbr ? 1 : 0;
  }

  /**
   * Returns whether or not we are using Variable Bit Rate encoding.
   * @return whether or not we are using Variable Bit Rate encoding.
   */
  public boolean getVbr() {
    return vbr_enabled != 0;
  }

  /**
   * Sets whether or not to use Voice Activity Detection encoding.
   * @param vad true to enable VAD.
   */
  public void setVad(final boolean vad) {
    vad_enabled = vad ? 1 : 0;
  }

  /**
   * Returns whether or not we are using Voice Activity Detection encoding.
   * @return whether or not we are using Voice Activity Detection encoding.
   */
  public boolean getVad() {
    return vad_enabled != 0;
  }

  /**
   * Sets whether or not to use Discontinuous Transmission encoding.
   * @param dtx true to enable DTX.
   */
  public void setDtx(final boolean dtx) {
    dtx_enabled = dtx ? 1 : 0;
  }

  /**
   * Returns the Average Bit Rate used (0 if ABR is not turned on).
   * @return the Average Bit Rate used (0 if ABR is not turned on).
   */
  public int getAbr() {
    return abr_enabled;
  }

  /**
   * Sets the Average Bit Rate. Enabling ABR also enables VBR and seeds the
   * VBR quality with the highest quality whose bit-rate fits the target.
   * @param abr the target average bit-rate in bits per second (0 disables the
   *            ABR flag, but VBR is still switched on, matching the reference
   *            encoder's behaviour).
   */
  public void setAbr(final int abr) {
    abr_enabled = (abr != 0) ? 1 : 0;
    vbr_enabled = 1;
    {
      int i = 10, rate, target;
      float vbr_qual;
      target = abr;
      // Find the highest quality level whose rate fits under the ABR target.
      while (i >= 0) {
        setQuality(i);
        rate = getBitRate();
        if (rate <= target)
          break;
        i--;
      }
      vbr_qual = i;
      if (vbr_qual < 0)
        vbr_qual = 0;
      setVbrQuality(vbr_qual);
      // Reset the ABR feedback-loop state.
      abr_count = 0;
      abr_drift = 0;
      abr_drift2 = 0;
    }
  }

  /**
   * Sets the Variable Bit Rate Quality.
   * @param quality the VBR quality, clamped to [0, 10].
   */
  public void setVbrQuality(float quality) {
    if (quality < 0f)
      quality = 0f;
    if (quality > 10f)
      quality = 10f;
    vbr_quality = quality;
  }

  /**
   * Returns the Variable Bit Rate Quality.
   * @return the Variable Bit Rate Quality.
   */
  public float getVbrQuality() {
    return vbr_quality;
  }

  /**
   * Sets the algorithmic complexity.
   * @param complexity the complexity level, clamped to [0, 10].
   */
  public void setComplexity(int complexity) {
    if (complexity < 0)
      complexity = 0;
    if (complexity > 10)
      complexity = 10;
    this.complexity = complexity;
  }

  /**
   * Returns the algorithmic complexity.
   * @return the algorithmic complexity.
   */
  public int getComplexity() {
    return complexity;
  }

  /**
   * Sets the sampling rate.
   * @param rate the sampling rate in Hz.
   */
  public void setSamplingRate(final int rate) {
    sampling_rate = rate;
  }

  /**
   * Returns the sampling rate.
   * @return the sampling rate.
   */
  public int getSamplingRate() {
    return sampling_rate;
  }

  /**
   * Returns the look-ahead of the encoder, i.e. the number of extra samples
   * the analysis window needs beyond the frame.
   * @return the look-ahead in samples.
   */
  public int getLookAhead() {
    return windowSize - frameSize;
  }

  /**
   * Returns the relative quality.
   * @return the relative quality.
   */
  public float getRelativeQuality() {
    return relative_quality;
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.etcd.springboot; import javax.annotation.Generated; import org.apache.camel.CamelContext; import org.apache.camel.util.jsse.SSLContextParameters; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.NestedConfigurationProperty; /** * The camel etcd component allows you to work with Etcd a distributed reliable * key-value store. * * Generated by camel-package-maven-plugin - do not edit this file! */ @Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo") @ConfigurationProperties(prefix = "camel.component.etcd") public class EtcdComponentConfiguration { /** * To set the URIs the client connects. */ private String uris; /** * To configure security using SSLContextParameters. */ @NestedConfigurationProperty private SSLContextParameters sslContextParameters; /** * The user name to use for basic authentication. */ private String userName; /** * The password to use for basic authentication. 
*/ private String password; /** * Sets the common configuration shared among endpoints */ private EtcdConfigurationNestedConfiguration configuration; /** * Enable usage of global SSL context parameters. */ private Boolean useGlobalSslContextParameters = false; /** * Whether the component should resolve property placeholders on itself when * starting. Only properties which are of String type can use property * placeholders. */ private Boolean resolvePropertyPlaceholders = true; public String getUris() { return uris; } public void setUris(String uris) { this.uris = uris; } public SSLContextParameters getSslContextParameters() { return sslContextParameters; } public void setSslContextParameters( SSLContextParameters sslContextParameters) { this.sslContextParameters = sslContextParameters; } public String getUserName() { return userName; } public void setUserName(String userName) { this.userName = userName; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public EtcdConfigurationNestedConfiguration getConfiguration() { return configuration; } public void setConfiguration( EtcdConfigurationNestedConfiguration configuration) { this.configuration = configuration; } public Boolean getUseGlobalSslContextParameters() { return useGlobalSslContextParameters; } public void setUseGlobalSslContextParameters( Boolean useGlobalSslContextParameters) { this.useGlobalSslContextParameters = useGlobalSslContextParameters; } public Boolean getResolvePropertyPlaceholders() { return resolvePropertyPlaceholders; } public void setResolvePropertyPlaceholders( Boolean resolvePropertyPlaceholders) { this.resolvePropertyPlaceholders = resolvePropertyPlaceholders; } public static class EtcdConfigurationNestedConfiguration { public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.etcd.EtcdConfiguration.class; private CamelContext camelContext; /** * To set the URIs the client connects. 
*/ private String uris = "http://localhost:2379,http://localhost:4001"; /** * To configure security using SSLContextParameters. */ @NestedConfigurationProperty private SSLContextParameters sslContextParameters; /** * The user name to use for basic authentication. */ private String userName; /** * The password to use for basic authentication. */ private String password; /** * To send an empty message in case of timeout watching for a key. */ private Boolean sendEmptyExchangeOnTimeout = false; /** * To apply an action recursively. */ private Boolean recursive = false; /** * To set the lifespan of a key in milliseconds. */ private Integer timeToLive; /** * To set the maximum time an action could take to complete. */ private Long timeout; /** * The index to watch from */ private Long fromIndex = 0L; /** * The path to look for for service discovery */ private String servicePath = "/services/"; public CamelContext getCamelContext() { return camelContext; } public void setCamelContext(CamelContext camelContext) { this.camelContext = camelContext; } public String getUris() { return uris; } public void setUris(String uris) { this.uris = uris; } public SSLContextParameters getSslContextParameters() { return sslContextParameters; } public void setSslContextParameters( SSLContextParameters sslContextParameters) { this.sslContextParameters = sslContextParameters; } public String getUserName() { return userName; } public void setUserName(String userName) { this.userName = userName; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public Boolean getSendEmptyExchangeOnTimeout() { return sendEmptyExchangeOnTimeout; } public void setSendEmptyExchangeOnTimeout( Boolean sendEmptyExchangeOnTimeout) { this.sendEmptyExchangeOnTimeout = sendEmptyExchangeOnTimeout; } public Boolean getRecursive() { return recursive; } public void setRecursive(Boolean recursive) { this.recursive = recursive; } public Integer 
getTimeToLive() { return timeToLive; } public void setTimeToLive(Integer timeToLive) { this.timeToLive = timeToLive; } public Long getTimeout() { return timeout; } public void setTimeout(Long timeout) { this.timeout = timeout; } public Long getFromIndex() { return fromIndex; } public void setFromIndex(Long fromIndex) { this.fromIndex = fromIndex; } public String getServicePath() { return servicePath; } public void setServicePath(String servicePath) { this.servicePath = servicePath; } } }
/* * Copyright 2005 Sascha Weinreuter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.intellij.lang.xpath.xslt.impl; import com.intellij.lang.documentation.DocumentationProvider; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.*; import com.intellij.psi.impl.light.LightElement; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.xml.*; import com.intellij.util.IncorrectOperationException; import com.intellij.xml.util.XmlUtil; import org.intellij.lang.xpath.completion.ElementProvider; import org.intellij.lang.xpath.completion.FunctionLookup; import org.intellij.lang.xpath.psi.XPathFunction; import org.intellij.lang.xpath.xslt.XsltSupport; import org.intellij.lang.xpath.xslt.psi.XsltElement; import org.intellij.lang.xpath.xslt.psi.impl.XsltLanguage; import org.jdom.Document; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.input.SAXBuilder; import org.jdom.transform.JDOMSource; import org.jdom.xpath.XPath; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.xml.transform.Templates; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import java.io.IOException; import 
java.io.StringWriter;
import java.lang.ref.SoftReference;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Supplies quick-documentation for XSLT elements and XPath functions. The
 * documentation is read from a bundled {@code resources/documentation.xml} and
 * rendered to HTML via the {@code resources/documentation.xsl} stylesheet; the
 * parsed document and the compiled stylesheet are cached in SoftReferences.
 * NOTE(review): the caches are not synchronized — presumably only used from a
 * single (EDT/read-action) thread; confirm before reusing elsewhere.
 */
public class XsltDocumentationProvider implements DocumentationProvider {
    // Soft-cached compiled XSL stylesheet (see getTemplate()).
    private SoftReference<Templates> myTemplates;
    // Soft-cached parsed documentation.xml (see getDocumentationDocument()).
    private SoftReference<Document> myDocument;

    /**
     * Builds the external-documentation URL: looks up the named element/function
     * node in documentation.xml and concatenates its parent's "base" attribute
     * with its own "href" attribute.
     */
    @Nullable
    public List<String> getUrlFor(PsiElement psiElement, PsiElement psiElement1) {
        if (psiElement instanceof XsltElement) return null;

        final String category;
        final String name;
        final XmlTag tag = getTag(psiElement1);
        if (tag != null) {
            name = tag.getLocalName();
            category = "element";
        } else if (psiElement instanceof XPathFunction) {
            name = ((XPathFunction)psiElement).getName();
            category = "function";
        } else if (psiElement instanceof DocElement) {
            name = ((DocElement)psiElement).getName();
            category = ((DocElement)psiElement).getCategory();
        } else {
            return null;
        }

        try {
            final Document document = getDocumentationDocument();
            // Select the matching <element>/<function> node by name.
            // NOTE(review): 'name' is concatenated into the XPath unescaped;
            // safe only while names cannot contain quotes — verify.
            final XPath xPath = XPath.newInstance("//x:" + category + "[@name = '" + name + "']");
            xPath.addNamespace("x", document.getRootElement().getNamespaceURI());
            final Element e = (Element)xPath.selectSingleNode(document);
            if (e != null) {
                return Collections.singletonList(e.getParentElement().getAttributeValue("base") + e.getAttributeValue("href"));
            }
        } catch (Exception e) {
            Logger.getInstance(getClass().getName()).error(e);
        }
        return null;
    }

    /**
     * Produces the quick-doc text. For XSLT elements defined in the current
     * file, the XML comment immediately preceding the tag is used (with '&'
     * and '<' HTML-escaped); otherwise the bundled documentation is rendered.
     */
    @Nullable
    public String generateDoc(PsiElement psiElement, PsiElement psiElement1) {
        if (psiElement instanceof DocElement) {
            final DocElement element = (DocElement)psiElement;
            return getDocumentation(element.getName(), element.getCategory());
        }
        if (psiElement instanceof XsltElement) {
            final XmlTag t = ((XsltElement)psiElement).getTag();
            // Walk backwards over whitespace/text to find a leading XML comment.
            PsiElement p = t.getPrevSibling();
            while (p instanceof PsiWhiteSpace || p instanceof XmlText) {
                p = p.getPrevSibling();
            }
            if (p instanceof XmlComment) {
                final String commentText = XmlUtil.getCommentText((XmlComment)p);
                // Escape '&' first, then '<' — order matters for correct HTML.
                return commentText != null ? commentText.replaceAll("&", "&amp;").replaceAll("<", "&lt;") : null;
            } else {
                return null;
            }
        }
        final XmlTag tag = getTag(psiElement1);
        if (tag != null) {
            return getDocumentation(tag.getLocalName(), "element");
        } else if (psiElement instanceof XPathFunction) {
            return getDocumentation(((XPathFunction)psiElement).getName(), "function");
        }
        return null;
    }

    // Marker emitted by documentation.xsl telling whether the lookup succeeded.
    private static final Pattern check = Pattern.compile("x:found=\"(true|false)\"");

    /**
     * Renders the documentation for one element/function through the XSL
     * stylesheet and returns the HTML, or null when the stylesheet reports
     * x:found="false" or any error occurs.
     */
    @Nullable
    private String getDocumentation(String name, String type) {
        try {
            final Transformer transformer = getTemplate().newTransformer();
            transformer.setParameter("element", name);
            transformer.setParameter("type", type);
            final StringWriter writer = new StringWriter();
            transformer.transform(new JDOMSource(getDocumentationDocument()), new StreamResult(writer));

            final String s = writer.toString();
            final Matcher matcher = check.matcher(s);
            if (matcher.find()) {
                if (matcher.group(1).equals("true")) {
                    // Strip the <META ...> header the transformer injects.
                    return s.replaceFirst("<META.+?>", "");
                }
            }
        } catch (Exception e) {
            Logger.getInstance(getClass().getName()).error(e);
        }
        return null;
    }

    /**
     * Returns the enclosing XSLT tag of the given element (resolving through a
     * parent XmlAttribute if needed), or null if it is not an XSLT tag.
     */
    @Nullable
    private static XmlTag getTag(PsiElement psiElement1) {
        if (psiElement1 == null) return null;

        final PsiElement element;
        if (psiElement1.getParent() instanceof XmlAttribute) {
            final XmlAttribute xmlAttribute = ((XmlAttribute)psiElement1.getParent());
            element = xmlAttribute.getParent();
        } else {
            element = psiElement1.getParent();
        }
        if (element instanceof XmlTag) {
            final XmlTag tag = (XmlTag)element;
            if (XsltSupport.isXsltTag(tag)) {
                return tag;
            }
        }
        return null;
    }

    /** Lazily parses and soft-caches resources/documentation.xml. */
    private Document getDocumentationDocument() throws IOException, JDOMException {
        Document d;
        if (myDocument == null || ((d = myDocument.get()) == null)) {
            d = new SAXBuilder().build(XsltSupport.class.getResource("resources/documentation.xml"));
            myDocument = new SoftReference<Document>(d);
        }
        return d;
    }

    /** Lazily compiles and soft-caches resources/documentation.xsl. */
    private Templates getTemplate() throws TransformerConfigurationException, IOException {
        Templates t;
        if (myTemplates == null || (t = myTemplates.get()) == null) {
            t = TransformerFactory.newInstance().newTemplates(makeSource("resources/documentation.xsl"));
            myTemplates = new SoftReference<Templates>(t);
        }
        return t;
    }

    /** Opens a classpath resource as a StreamSource with a proper system id. */
    private StreamSource makeSource(String name) throws IOException {
        final URL resource = XsltSupport.class.getResource(name);
        return new StreamSource(resource.openStream(), resource.toExternalForm());
    }

    /**
     * Maps a completion lookup item back to a PSI element that this provider
     * can document: plain strings become DocElements for XSLT tags (stripping
     * the xsl: prefix when present), FunctionLookups become function
     * DocElements, and XSLT elements are returned as-is.
     */
    @Nullable
    public PsiElement getDocumentationElementForLookupItem(PsiManager mgr, Object object, PsiElement psiElement) {
        if (object instanceof String) {
            if (psiElement instanceof XmlElement) {
                final XmlTag tag = PsiTreeUtil.getParentOfType(psiElement, XmlTag.class);
                if (tag != null && XsltSupport.XSLT_NS.equals(tag.getNamespace())) {
                    final String prefix = tag.getNamespacePrefix();
                    if (prefix.length() == 0) {
                        return new DocElement(mgr, psiElement, "element", (String)object);
                    } else if (StringUtil.startsWithConcatenationOf(((String)object), prefix, ":")) {
                        return new DocElement(mgr, psiElement, "element", ((String)object).substring(prefix.length() + 1));
                    }
                }
            }
        }
        if (object instanceof FunctionLookup) {
            final FunctionLookup lookup = ((FunctionLookup)object);
            return new DocElement(mgr, psiElement, "function", lookup.getName());
        } else if (object instanceof ElementProvider) {
            return ((ElementProvider)object).getElement();
        }
        if (object instanceof XsltElement) {
            return (PsiElement)object;
        }
        return null;
    }

    /**
     * Resolves a "category$name" documentation link back to a DocElement.
     */
    @Nullable
    public PsiElement getDocumentationElementForLink(PsiManager mgr, String string, PsiElement psiElement) {
        final String[] strings = string.split("\\$");
        if (strings.length == 2) {
            return new DocElement(mgr, psiElement, strings[0], strings[1]);
        }
        return null;
    }

    @Nullable
    public String getQuickNavigateInfo(PsiElement element) {
        return null;
    }

    /**
     * Lightweight, read-only PSI stand-in carrying just a documentation
     * category ("element"/"function") and a name, anchored to a real element
     * for validity and containing-file resolution.
     */
    static class DocElement extends LightElement implements PsiNamedElement {
        private final PsiElement myElement;
        private final String myCategory;
        private final String myName;

        public DocElement(PsiManager mgr, PsiElement element, String category, String name) {
            super(mgr, XsltLanguage.INSTANCE);
            myElement = element;
            myCategory = category;
            myName = name;
        }

        public String getCategory() {
            return myCategory;
        }

        // Renaming a synthetic doc element is meaningless — always rejected.
        public PsiElement setName(@NotNull @NonNls String name) throws IncorrectOperationException {
            throw new IncorrectOperationException("Unsupported");
        }

        public String getName() {
            return myName;
        }

        public String toString() {
            return "DocElement";
        }

        @SuppressWarnings({"ConstantConditions"})
        public String getText() {
            return null;
        }

        public void accept(@NotNull PsiElementVisitor visitor) {
        }

        public PsiElement copy() {
            return this;
        }

        @Override
        public boolean isValid() {
            return myElement != null && myElement.isValid();
        }

        /** Resolves the containing file through the anchor element's context. */
        @Nullable
        public PsiFile getContainingFile() {
            if (!isValid()) {
                return null;
            }
            PsiFile file = myElement.getContainingFile();
            final PsiElement context = myElement.getContext();
            if (file == null && context != null) {
                file = context.getContainingFile();
            }
            PsiFile f;
            if ((f = PsiTreeUtil.getContextOfType(file, PsiFile.class, true)) instanceof XmlFile) {
                return f;
            }
            return file;
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import com.google.common.base.Preconditions; import org.apache.commons.io.Charsets; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.DirectoryListingStartAfterNotFoundException; import org.apache.hadoop.fs.FileEncryptionInfo; import org.apache.hadoop.fs.InvalidPathException; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.FsPermissionExtension; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.SnapshotException; import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite; import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectorySnapshottableFeature; import 
org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
import org.apache.hadoop.hdfs.util.ReadOnlyList;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Arrays;

class FSDirStatAndListingOp {
  /**
   * Resolves and permission-checks the path, translates a reserved
   * (/.reserved) startAfter into a regular last path component, then delegates
   * to getListing for the actual (partial) directory listing.
   */
  static DirectoryListing getListingInt(FSDirectory fsd, final String srcArg,
      byte[] startAfter, boolean needLocation) throws IOException {
    FSPermissionChecker pc = fsd.getPermissionChecker();
    byte[][] pathComponents = FSDirectory
        .getPathComponentsForReservedPath(srcArg);
    final String startAfterString = new String(startAfter, Charsets.UTF_8);
    final String src = fsd.resolvePath(pc, srcArg, pathComponents);
    final INodesInPath iip = fsd.getINodesInPath(src, true);

    // Get file name when startAfter is an INodePath
    if (FSDirectory.isReservedName(startAfterString)) {
      byte[][] startAfterComponents = FSDirectory
          .getPathComponentsForReservedPath(startAfterString);
      try {
        String tmp = FSDirectory.resolvePath(src, startAfterComponents, fsd);
        byte[][] regularPath = INode.getPathComponents(tmp);
        // Only the last component is needed to resume the listing.
        startAfter = regularPath[regularPath.length - 1];
      } catch (IOException e) {
        // Possibly the inode is deleted
        throw new DirectoryListingStartAfterNotFoundException(
            "Can't find startAfter " + startAfterString);
      }
    }

    boolean isSuperUser = true;
    if (fsd.isPermissionEnabled()) {
      // Directories need READ_EXECUTE on the target; for anything else only
      // traversal along the path is checked.
      if (iip.getLastINode() != null && iip.getLastINode().isDirectory()) {
        fsd.checkPathAccess(pc, iip, FsAction.READ_EXECUTE);
      } else {
        fsd.checkTraverse(pc, iip);
      }
      isSuperUser = pc.isSuperUser();
    }
    return getListing(fsd, iip, src, startAfter, needLocation, isSuperUser);
  }

  /**
   * Get the file info for a specific file.
   *
   * @param srcArg The string representation of the path to the file
   * @param resolveLink whether to throw UnresolvedLinkException
   *        if src refers to a symlink
   *
   * @return object containing information regarding the file
   *         or null if file not found
   */
  static HdfsFileStatus getFileInfo(
      FSDirectory fsd, String srcArg, boolean resolveLink)
      throws IOException {
    String src = srcArg;
    if (!DFSUtil.isValidName(src)) {
      throw new InvalidPathException("Invalid file name: " + src);
    }
    FSPermissionChecker pc = fsd.getPermissionChecker();
    byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
    src = fsd.resolvePath(pc, src, pathComponents);
    final INodesInPath iip = fsd.getINodesInPath(src, resolveLink);
    boolean isSuperUser = true;
    if (fsd.isPermissionEnabled()) {
      fsd.checkPermission(pc, iip, false, null, null, null, null, false);
      isSuperUser = pc.isSuperUser();
    }
    // Storage policy is only exposed to superusers (last argument).
    return getFileInfo(fsd, src, resolveLink,
        FSDirectory.isReservedRawName(srcArg), isSuperUser);
  }

  /**
   * Returns true if the file is closed
   */
  static boolean isFileClosed(FSDirectory fsd, String src) throws IOException {
    FSPermissionChecker pc = fsd.getPermissionChecker();
    byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
    src = fsd.resolvePath(pc, src, pathComponents);
    final INodesInPath iip = fsd.getINodesInPath(src, true);
    if (fsd.isPermissionEnabled()) {
      fsd.checkTraverse(pc, iip);
    }
    // valueOf throws if the last inode is not a file; "closed" means the file
    // is no longer under construction.
    return !INodeFile.valueOf(iip.getLastINode(), src).isUnderConstruction();
  }

  static ContentSummary getContentSummary(
      FSDirectory fsd, String src) throws IOException {
    byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
    FSPermissionChecker pc = fsd.getPermissionChecker();
    src = fsd.resolvePath(pc, src, pathComponents);
    final INodesInPath iip = fsd.getINodesInPath(src, false);
    if (fsd.isPermissionEnabled()) {
      // NOTE(review): positional nulls — READ_EXECUTE appears to fill the
      // subtree-access slot here (recursive check); confirm against
      // FSDirectory.checkPermission's signature.
      fsd.checkPermission(pc, iip, false, null, null, null,
          FsAction.READ_EXECUTE);
    }
    return getContentSummaryInt(fsd, iip);
  }

  /** Effective policy: the inode's own if specified, else the parent's. */
  private static byte getStoragePolicyID(byte
      inodePolicy, byte parentPolicy) {
    return inodePolicy != BlockStoragePolicySuite.ID_UNSPECIFIED ? inodePolicy
        : parentPolicy;
  }

  /**
   * Get a partial listing of the indicated directory
   *
   * We will stop when any of the following conditions is met:
   * 1) this.lsLimit files have been added
   * 2) needLocation is true AND enough files have been added such
   * that at least this.lsLimit block locations are in the response
   *
   * @param fsd FSDirectory
   * @param iip the INodesInPath instance containing all the INodes along the
   *            path
   * @param src the directory name
   * @param startAfter the name to start listing after
   * @param needLocation if block locations are returned
   * @return a partial listing starting after startAfter
   */
  private static DirectoryListing getListing(FSDirectory fsd, INodesInPath iip,
      String src, byte[] startAfter, boolean needLocation, boolean isSuperUser)
      throws IOException {
    String srcs = FSDirectory.normalizePath(src);
    final boolean isRawPath = FSDirectory.isReservedRawName(src);

    fsd.readLock();
    try {
      // "ls /dir/.snapshot" lists the snapshots instead of children.
      if (srcs.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR)) {
        return getSnapshotsListing(fsd, srcs, startAfter);
      }
      final int snapshot = iip.getPathSnapshotId();
      final INode targetNode = iip.getLastINode();
      if (targetNode == null)
        return null;
      // Storage policy is only revealed to superusers.
      byte parentStoragePolicy = isSuperUser ?
          targetNode.getStoragePolicyID() : BlockStoragePolicySuite
          .ID_UNSPECIFIED;

      if (!targetNode.isDirectory()) {
        // Listing a file: a single-entry listing with no remaining entries.
        return new DirectoryListing(
            new HdfsFileStatus[]{createFileStatus(fsd, src,
                HdfsFileStatus.EMPTY_NAME, targetNode, needLocation,
                parentStoragePolicy, snapshot, isRawPath, iip)}, 0);
      }

      final INodeDirectory dirInode = targetNode.asDirectory();
      final ReadOnlyList<INode> contents = dirInode.getChildrenList(snapshot);
      int startChild = INodeDirectory.nextChild(contents, startAfter);
      int totalNumChildren = contents.size();
      int numOfListing = Math.min(totalNumChildren - startChild,
          fsd.getLsLimit());
      int locationBudget = fsd.getLsLimit();
      int listingCnt = 0;
      HdfsFileStatus listing[] = new HdfsFileStatus[numOfListing];
      // The loop also stops early once the location budget is exhausted.
      for (int i=0; i<numOfListing && locationBudget>0; i++) {
        INode cur = contents.get(startChild+i);
        byte curPolicy = isSuperUser && !cur.isSymlink()?
            cur.getLocalStoragePolicyID():
            BlockStoragePolicySuite.ID_UNSPECIFIED;
        listing[i] = createFileStatus(fsd, src, cur.getLocalNameBytes(), cur,
            needLocation, getStoragePolicyID(curPolicy, parentStoragePolicy),
            snapshot, isRawPath, iip);
        listingCnt++;
        if (needLocation) {
          // Once we hit lsLimit locations, stop.
          // This helps to prevent excessively large response payloads.
          // Approximate #locations with locatedBlockCount() * repl_factor
          LocatedBlocks blks =
              ((HdfsLocatedFileStatus)listing[i]).getBlockLocations();
          locationBudget -= (blks == null) ?
              0 : blks.locatedBlockCount() * listing[i].getReplication();
        }
      }
      // truncate return array if necessary
      if (listingCnt < numOfListing) {
        listing = Arrays.copyOf(listing, listingCnt);
      }
      return new DirectoryListing(
          listing, totalNumChildren-startChild-listingCnt);
    } finally {
      fsd.readUnlock();
    }
  }

  /**
   * Get a listing of all the snapshots of a snapshottable directory
   */
  private static DirectoryListing getSnapshotsListing(
      FSDirectory fsd, String src, byte[] startAfter) throws IOException {
    Preconditions.checkState(fsd.hasReadLock());
    Preconditions.checkArgument(
        src.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR),
        "%s does not end with %s", src,
        HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR);
    // Strip the trailing ".snapshot" to get the snapshottable directory path.
    final String dirPath = FSDirectory.normalizePath(src.substring(0,
        src.length() - HdfsConstants.DOT_SNAPSHOT_DIR.length()));
    final INode node = fsd.getINode(dirPath);
    final INodeDirectory dirNode = INodeDirectory.valueOf(node, dirPath);
    final DirectorySnapshottableFeature sf =
        dirNode.getDirectorySnapshottableFeature();
    if (sf == null) {
      throw new SnapshotException(
          "Directory is not a snapshottable directory: " + dirPath);
    }
    final ReadOnlyList<Snapshot> snapshots = sf.getSnapshotList();
    // Binary search gives the resume point; negative result encodes the
    // insertion point (-(pos)-1), a hit means "start after that entry".
    int skipSize = ReadOnlyList.Util.binarySearch(snapshots, startAfter);
    skipSize = skipSize < 0 ? -skipSize - 1 : skipSize + 1;
    int numOfListing = Math.min(snapshots.size() - skipSize, fsd.getLsLimit());
    final HdfsFileStatus listing[] = new HdfsFileStatus[numOfListing];
    for (int i = 0; i < numOfListing; i++) {
      Snapshot.Root sRoot = snapshots.get(i + skipSize).getRoot();
      listing[i] = createFileStatus(fsd, src, sRoot.getLocalNameBytes(), sRoot,
          BlockStoragePolicySuite.ID_UNSPECIFIED, Snapshot.CURRENT_STATE_ID,
          false, INodesInPath.fromINode(sRoot));
    }
    return new DirectoryListing(
        listing, snapshots.size() - skipSize - numOfListing);
  }

  /** Get the file info for a specific file.
   * @param fsd FSDirectory
   * @param src The string representation of the path to the file
   * @param isRawPath true if a /.reserved/raw pathname was passed by the user
   * @param includeStoragePolicy whether to include storage policy
   * @return object containing information regarding the file
   *         or null if file not found
   */
  static HdfsFileStatus getFileInfo(
      FSDirectory fsd, String path, INodesInPath src, boolean isRawPath,
      boolean includeStoragePolicy)
      throws IOException {
    fsd.readLock();
    try {
      final INode i = src.getLastINode();
      byte policyId = includeStoragePolicy && i != null && !i.isSymlink() ?
          i.getStoragePolicyID() : BlockStoragePolicySuite.ID_UNSPECIFIED;
      return i == null ? null : createFileStatus(
          fsd, path, HdfsFileStatus.EMPTY_NAME, i, policyId,
          src.getPathSnapshotId(), isRawPath, src);
    } finally {
      fsd.readUnlock();
    }
  }

  static HdfsFileStatus getFileInfo(
      FSDirectory fsd, String src, boolean resolveLink, boolean isRawPath,
      boolean includeStoragePolicy)
    throws IOException {
    String srcs = FSDirectory.normalizePath(src);
    if (srcs.endsWith(HdfsConstants.SEPARATOR_DOT_SNAPSHOT_DIR)) {
      // ".snapshot" has no real INode: synthesize an empty directory status
      // so that "ls /dir/.snapshot" can pass the existence check.
      if (fsd.getINode4DotSnapshot(srcs) != null) {
        return new HdfsFileStatus(0, true, 0, 0, 0, 0, null, null, null, null,
            HdfsFileStatus.EMPTY_NAME, -1L, 0, null,
            BlockStoragePolicySuite.ID_UNSPECIFIED);
      }
      return null;
    }
    fsd.readLock();
    try {
      final INodesInPath iip = fsd.getINodesInPath(srcs, resolveLink);
      return getFileInfo(fsd, src, iip, isRawPath, includeStoragePolicy);
    } finally {
      fsd.readUnlock();
    }
  }

  /**
   * Currently we only support "ls /xxx/.snapshot" which will return all the
   * snapshots of a directory. The FSCommand Ls will first call getFileInfo to
   * make sure the file/directory exists (before the real getListing call).
   * Since we do not have a real INode for ".snapshot", we return an empty
   * non-null HdfsFileStatus here.
*/ private static HdfsFileStatus getFileInfo4DotSnapshot( FSDirectory fsd, String src) throws UnresolvedLinkException { if (fsd.getINode4DotSnapshot(src) != null) { return new HdfsFileStatus(0, true, 0, 0, 0, 0, null, null, null, null, HdfsFileStatus.EMPTY_NAME, -1L, 0, null, BlockStoragePolicySuite.ID_UNSPECIFIED); } return null; } /** * create an hdfs file status from an inode * * @param fsd FSDirectory * @param path the local name * @param node inode * @param needLocation if block locations need to be included or not * @param isRawPath true if this is being called on behalf of a path in * /.reserved/raw * @return a file status * @throws java.io.IOException if any error occurs */ static HdfsFileStatus createFileStatus( FSDirectory fsd, String fullPath, byte[] path, INode node, boolean needLocation, byte storagePolicy, int snapshot, boolean isRawPath, INodesInPath iip) throws IOException { if (needLocation) { return createLocatedFileStatus(fsd, fullPath, path, node, storagePolicy, snapshot, isRawPath, iip); } else { return createFileStatus(fsd, fullPath, path, node, storagePolicy, snapshot, isRawPath, iip); } } /** * Create FileStatus by file INode */ static HdfsFileStatus createFileStatus( FSDirectory fsd, String fullPath, byte[] path, INode node, byte storagePolicy, int snapshot, boolean isRawPath, INodesInPath iip) throws IOException { long size = 0; // length is zero for directories short replication = 0; long blocksize = 0; final boolean isEncrypted; final FileEncryptionInfo feInfo = isRawPath ? null : fsd.getFileEncryptionInfo(node, snapshot, iip); if (node.isFile()) { final INodeFile fileNode = node.asFile(); size = fileNode.computeFileSize(snapshot); replication = fileNode.getFileReplication(snapshot); blocksize = fileNode.getPreferredBlockSize(); isEncrypted = (feInfo != null) || (isRawPath && fsd.isInAnEZ(INodesInPath.fromINode(node))); } else { isEncrypted = fsd.isInAnEZ(INodesInPath.fromINode(node)); } int childrenNum = node.isDirectory() ? 
node.asDirectory().getChildrenNum(snapshot) : 0; INodeAttributes nodeAttrs = fsd.getAttributes(fullPath, path, node, snapshot); return new HdfsFileStatus( size, node.isDirectory(), replication, blocksize, node.getModificationTime(snapshot), node.getAccessTime(snapshot), getPermissionForFileStatus(nodeAttrs, isEncrypted), nodeAttrs.getUserName(), nodeAttrs.getGroupName(), node.isSymlink() ? node.asSymlink().getSymlink() : null, path, node.getId(), childrenNum, feInfo, storagePolicy); } /** * Create FileStatus with location info by file INode */ private static HdfsLocatedFileStatus createLocatedFileStatus( FSDirectory fsd, String fullPath, byte[] path, INode node, byte storagePolicy, int snapshot, boolean isRawPath, INodesInPath iip) throws IOException { assert fsd.hasReadLock(); long size = 0; // length is zero for directories short replication = 0; long blocksize = 0; LocatedBlocks loc = null; final boolean isEncrypted; final FileEncryptionInfo feInfo = isRawPath ? null : fsd.getFileEncryptionInfo(node, snapshot, iip); if (node.isFile()) { final INodeFile fileNode = node.asFile(); size = fileNode.computeFileSize(snapshot); replication = fileNode.getFileReplication(snapshot); blocksize = fileNode.getPreferredBlockSize(); final boolean inSnapshot = snapshot != Snapshot.CURRENT_STATE_ID; final boolean isUc = !inSnapshot && fileNode.isUnderConstruction(); final long fileSize = !inSnapshot && isUc ? fileNode.computeFileSizeNotIncludingLastUcBlock() : size; loc = fsd.getFSNamesystem().getBlockManager().createLocatedBlocks( fileNode.getBlocks(snapshot), fileSize, isUc, 0L, size, false, inSnapshot, feInfo); if (loc == null) { loc = new LocatedBlocks(); } isEncrypted = (feInfo != null) || (isRawPath && fsd.isInAnEZ(INodesInPath.fromINode(node))); } else { isEncrypted = fsd.isInAnEZ(INodesInPath.fromINode(node)); } int childrenNum = node.isDirectory() ? 
node.asDirectory().getChildrenNum(snapshot) : 0; INodeAttributes nodeAttrs = fsd.getAttributes(fullPath, path, node, snapshot); HdfsLocatedFileStatus status = new HdfsLocatedFileStatus(size, node.isDirectory(), replication, blocksize, node.getModificationTime(snapshot), node.getAccessTime(snapshot), getPermissionForFileStatus(nodeAttrs, isEncrypted), nodeAttrs.getUserName(), nodeAttrs.getGroupName(), node.isSymlink() ? node.asSymlink().getSymlink() : null, path, node.getId(), loc, childrenNum, feInfo, storagePolicy); // Set caching information for the located blocks. if (loc != null) { CacheManager cacheManager = fsd.getFSNamesystem().getCacheManager(); for (LocatedBlock lb: loc.getLocatedBlocks()) { cacheManager.setCachedLocations(lb); } } return status; } /** * Returns an inode's FsPermission for use in an outbound FileStatus. If the * inode has an ACL or is for an encrypted file/dir, then this method will * return an FsPermissionExtension. * * @param node INode to check * @param snapshot int snapshot ID * @param isEncrypted boolean true if the file/dir is encrypted * @return FsPermission from inode, with ACL bit on if the inode has an ACL * and encrypted bit on if it represents an encrypted file/dir. */ private static FsPermission getPermissionForFileStatus( INodeAttributes node, boolean isEncrypted) { FsPermission perm = node.getFsPermission(); boolean hasAcl = node.getAclFeature() != null; if (hasAcl || isEncrypted) { perm = new FsPermissionExtension(perm, hasAcl, isEncrypted); } return perm; } private static ContentSummary getContentSummaryInt(FSDirectory fsd, INodesInPath iip) throws IOException { fsd.readLock(); try { INode targetNode = iip.getLastINode(); if (targetNode == null) { throw new FileNotFoundException("File does not exist: " + iip.getPath()); } else { // Make it relinquish locks everytime contentCountLimit entries are // processed. 0 means disabled. I.e. blocking for the entire duration. 
ContentSummaryComputationContext cscc = new ContentSummaryComputationContext(fsd, fsd.getFSNamesystem(), fsd.getContentCountLimit(), fsd.getContentSleepMicroSec()); ContentSummary cs = targetNode.computeAndConvertContentSummary(cscc); fsd.addYieldCount(cscc.getYieldCount()); return cs; } } finally { fsd.readUnlock(); } } }
/* * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
* */ package org.apache.hc.core5.http.impl.io; import java.io.IOException; import org.apache.hc.core5.annotation.Contract; import org.apache.hc.core5.annotation.ThreadingBehavior; import org.apache.hc.core5.http.ClassicHttpRequest; import org.apache.hc.core5.http.ClassicHttpResponse; import org.apache.hc.core5.http.ConnectionReuseStrategy; import org.apache.hc.core5.http.Header; import org.apache.hc.core5.http.HttpEntity; import org.apache.hc.core5.http.HttpException; import org.apache.hc.core5.http.HttpHeaders; import org.apache.hc.core5.http.HttpStatus; import org.apache.hc.core5.http.HttpVersion; import org.apache.hc.core5.http.ProtocolException; import org.apache.hc.core5.http.ProtocolVersion; import org.apache.hc.core5.http.UnsupportedHttpVersionException; import org.apache.hc.core5.http.impl.DefaultConnectionReuseStrategy; import org.apache.hc.core5.http.impl.Http1StreamListener; import org.apache.hc.core5.http.io.HttpClientConnection; import org.apache.hc.core5.http.io.HttpResponseInformationCallback; import org.apache.hc.core5.http.message.MessageSupport; import org.apache.hc.core5.http.message.StatusLine; import org.apache.hc.core5.http.protocol.HttpContext; import org.apache.hc.core5.http.protocol.HttpCoreContext; import org.apache.hc.core5.http.protocol.HttpProcessor; import org.apache.hc.core5.util.Args; import org.apache.hc.core5.io.Closer; /** * {@code HttpRequestExecutor} is a client side HTTP protocol handler based * on the blocking (classic) I/O model. * <p> * {@code HttpRequestExecutor} relies on {@link HttpProcessor} to generate * mandatory protocol headers for all outgoing messages and apply common, * cross-cutting message transformations to all incoming and outgoing messages. * Application specific processing can be implemented outside * {@code HttpRequestExecutor} once the request has been executed and * a response has been received. 
* * @since 4.0 */ @Contract(threading = ThreadingBehavior.IMMUTABLE) public class HttpRequestExecutor { public static final int DEFAULT_WAIT_FOR_CONTINUE = 3000; private final int waitForContinue; private final ConnectionReuseStrategy connReuseStrategy; private final Http1StreamListener streamListener; /** * Creates new instance of HttpRequestExecutor. * * @since 4.3 */ public HttpRequestExecutor( final int waitForContinue, final ConnectionReuseStrategy connReuseStrategy, final Http1StreamListener streamListener) { super(); this.waitForContinue = Args.positive(waitForContinue, "Wait for continue time"); this.connReuseStrategy = connReuseStrategy != null ? connReuseStrategy : DefaultConnectionReuseStrategy.INSTANCE; this.streamListener = streamListener; } public HttpRequestExecutor(final ConnectionReuseStrategy connReuseStrategy) { this(DEFAULT_WAIT_FOR_CONTINUE, connReuseStrategy, null); } public HttpRequestExecutor() { this(DEFAULT_WAIT_FOR_CONTINUE, null, null); } /** * Sends the request and obtain a response. * * @param request the request to execute. * @param conn the connection over which to execute the request. * @param informationCallback callback to execute upon receipt of information status (1xx). * May be null. * @param context the context * @return the response to the request. * * @throws IOException in case of an I/O error. * @throws HttpException in case of HTTP protocol violation or a processing * problem. 
     */
    public ClassicHttpResponse execute(
            final ClassicHttpRequest request,
            final HttpClientConnection conn,
            final HttpResponseInformationCallback informationCallback,
            final HttpContext context) throws IOException, HttpException {
        Args.notNull(request, "HTTP request");
        Args.notNull(conn, "Client connection");
        Args.notNull(context, "HTTP context");
        try {
            // Expose transport-level details to the protocol context.
            context.setAttribute(HttpCoreContext.SSL_SESSION, conn.getSSLSession());
            context.setAttribute(HttpCoreContext.CONNECTION_ENDPOINT, conn.getEndpointDetails());

            final ProtocolVersion transportVersion = request.getVersion();
            if (transportVersion != null) {
                // This executor speaks HTTP/1.x only.
                if (transportVersion.greaterEquals(HttpVersion.HTTP_2)) {
                    throw new UnsupportedHttpVersionException(transportVersion);
                }
                context.setProtocolVersion(transportVersion);
            }

            conn.sendRequestHeader(request);
            if (streamListener != null) {
                streamListener.onRequestHead(conn, request);
            }
            boolean expectContinue = false;
            final HttpEntity entity = request.getEntity();
            if (entity != null) {
                final Header expect = request.getFirstHeader(HttpHeaders.EXPECT);
                expectContinue = expect != null && "100-continue".equalsIgnoreCase(expect.getValue());
                // Without expect-continue the entity is streamed immediately.
                if (!expectContinue) {
                    conn.sendRequestEntity(request);
                }
            }
            conn.flush();
            ClassicHttpResponse response = null;
            // Loop until a final (non-1xx) response head has been received.
            while (response == null) {
                if (expectContinue) {
                    // Wait up to waitForContinue ms for the server's verdict;
                    // if nothing arrives, send the entity anyway (RFC-allowed).
                    if (conn.isDataAvailable(this.waitForContinue)) {
                        response = conn.receiveResponseHeader();
                        if (streamListener != null) {
                            streamListener.onResponseHead(conn, response);
                        }
                        final int status = response.getCode();
                        if (status == HttpStatus.SC_CONTINUE) {
                            // discard 100-continue
                            response = null;
                            conn.sendRequestEntity(request);
                        } else if (status < HttpStatus.SC_SUCCESS) {
                            // Other informational (1xx) responses are reported
                            // to the callback and then discarded.
                            if (informationCallback != null) {
                                informationCallback.execute(response, conn, context);
                            }
                            response = null;
                            continue;
                        } else if (status >= HttpStatus.SC_CLIENT_ERROR){
                            // Early error: do not send the entity at all.
                            conn.terminateRequest(request);
                        } else {
                            conn.sendRequestEntity(request);
                        }
                    } else {
                        conn.sendRequestEntity(request);
                    }
                    conn.flush();
                    expectContinue = false;
                }
                else {
                    response = conn.receiveResponseHeader();
                    if (streamListener != null) {
                        streamListener.onResponseHead(conn, response);
                    }
                    final int status = response.getCode();
                    if (status < HttpStatus.SC_INFORMATIONAL) {
                        throw new ProtocolException("Invalid response: " + new StatusLine(response));
                    }
                    if (status < HttpStatus.SC_SUCCESS) {
                        // 1xx responses (other than an unexpected 100) go to
                        // the callback; keep reading for the final response.
                        if (informationCallback != null && status != HttpStatus.SC_CONTINUE) {
                            informationCallback.execute(response, conn, context);
                        }
                        response = null;
                    }
                }
            }
            if (MessageSupport.canResponseHaveBody(request.getMethod(), response)) {
                conn.receiveResponseEntity(response);
            }
            return response;
        } catch (final HttpException | IOException | RuntimeException ex) {
            // The connection state is unknown after a failure: close it.
            Closer.closeQuietly(conn);
            throw ex;
        }
    }

    /**
     * Sends the request and obtain a response.
     *
     * @param request   the request to execute.
     * @param conn      the connection over which to execute the request.
     * @param context   the context
     * @return  the response to the request.
     *
     * @throws IOException in case of an I/O error.
     * @throws HttpException in case of HTTP protocol violation or a processing
     *   problem.
     */
    public ClassicHttpResponse execute(
            final ClassicHttpRequest request,
            final HttpClientConnection conn,
            final HttpContext context) throws IOException, HttpException {
        return execute(request, conn, null, context);
    }

    /**
     * Pre-process the given request using the given protocol processor and
     * initiates the process of request execution.
     *
     * @param request   the request to prepare
     * @param processor the processor to use
     * @param context   the context for sending the request
     *
     * @throws IOException in case of an I/O error.
     * @throws HttpException in case of HTTP protocol violation or a processing
     *   problem.
*/ public void preProcess( final ClassicHttpRequest request, final HttpProcessor processor, final HttpContext context) throws HttpException, IOException { Args.notNull(request, "HTTP request"); Args.notNull(processor, "HTTP processor"); Args.notNull(context, "HTTP context"); context.setAttribute(HttpCoreContext.HTTP_REQUEST, request); processor.process(request, request.getEntity(), context); } /** * Post-processes the given response using the given protocol processor and * completes the process of request execution. * <p> * This method does <i>not</i> read the response entity, if any. * The connection over which content of the response entity is being * streamed from cannot be reused until the response entity has been * fully consumed. * * @param response the response object to post-process * @param processor the processor to use * @param context the context for post-processing the response * * @throws IOException in case of an I/O error. * @throws HttpException in case of HTTP protocol violation or a processing * problem. */ public void postProcess( final ClassicHttpResponse response, final HttpProcessor processor, final HttpContext context) throws HttpException, IOException { Args.notNull(response, "HTTP response"); Args.notNull(processor, "HTTP processor"); Args.notNull(context, "HTTP context"); final ProtocolVersion transportVersion = response.getVersion(); if (transportVersion != null) { context.setProtocolVersion(transportVersion); } context.setAttribute(HttpCoreContext.HTTP_RESPONSE, response); processor.process(response, response.getEntity(), context); } /** * Determines whether the connection can be kept alive and is safe to be re-used for subsequent message exchanges. * * @param request current request object. * @param response current response object. * @param connection actual connection. * @param context current context. * @return {@code true} is the connection can be kept-alive and re-used. * @throws IOException in case of an I/O error. 
*/ public boolean keepAlive( final ClassicHttpRequest request, final ClassicHttpResponse response, final HttpClientConnection connection, final HttpContext context) throws IOException { Args.notNull(connection, "HTTP connection"); Args.notNull(request, "HTTP request"); Args.notNull(response, "HTTP response"); Args.notNull(context, "HTTP context"); final boolean keepAlive = connection.isConsistent() && connReuseStrategy.keepAlive(request, response, context); if (streamListener != null) { streamListener.onExchangeComplete(connection, keepAlive); } return keepAlive; } }
/*
 * Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *    http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.eclipse.elasticbeanstalk.webproject;

import static com.amazonaws.eclipse.core.util.JavaProjectUtils.setDefaultJreToProjectClasspath;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.SubMonitor;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.swt.widgets.Display;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.browser.IWebBrowser;
import org.eclipse.ui.browser.IWorkbenchBrowserSupport;
import org.eclipse.wst.common.componentcore.ComponentCore;
import org.eclipse.wst.common.componentcore.resources.IVirtualComponent;
import org.eclipse.wst.common.componentcore.resources.IVirtualFolder;
import org.osgi.framework.Bundle;

import com.amazonaws.eclipse.core.AccountInfo;
import com.amazonaws.eclipse.core.AwsToolkitCore;
import com.amazonaws.eclipse.core.maven.MavenFactory;
import com.amazonaws.eclipse.core.model.MavenConfigurationDataModel; import com.amazonaws.eclipse.core.util.BundleUtils; import com.amazonaws.eclipse.core.validator.JavaPackageName; import com.amazonaws.eclipse.elasticbeanstalk.ElasticBeanstalkPlugin; /** * Runnable (with progress) that creates a new AWS Java web project, based on * the configured data model. This class is responsible for creating the WTP * dynamic web project, adding and configuring the AWS SDK for Java, creating * the security credential configuration file and eventually configuring the WTP * runtime targeted by the new project. */ final class CreateNewAwsJavaWebProjectRunnable implements IRunnableWithProgress { private final NewAwsJavaWebProjectDataModel dataModel; private static final IWorkbenchBrowserSupport BROWSER_SUPPORT = PlatformUI.getWorkbench().getBrowserSupport(); public CreateNewAwsJavaWebProjectRunnable(NewAwsJavaWebProjectDataModel dataModel) { this.dataModel = dataModel; } /* (non-Javadoc) * @see org.eclipse.jface.operation.IRunnableWithProgress#run(org.eclipse.core.runtime.IProgressMonitor) */ @Override public void run(IProgressMonitor progressMonitor) throws InvocationTargetException, InterruptedException { SubMonitor monitor = SubMonitor.convert(progressMonitor, "Creating new AWS Java web project", 100); try { IProject project = createBeanstalkProject( dataModel.getMavenConfigurationDataModel(), monitor); IJavaProject javaProject = JavaCore.create(project); setDefaultJreToProjectClasspath(javaProject, monitor); monitor.worked(20); addTemplateFiles(project); monitor.worked(10); // Configure the Tomcat session manager if (dataModel.getUseDynamoDBSessionManagement()) { addSessionManagerConfigurationFiles(project); } monitor.worked(10); if (dataModel.getProjectTemplate() == JavaWebProjectTemplate.DEFAULT) { // Open the readme.html in an editor browser window. 
File root = project.getLocation().toFile(); final File indexHtml = new File(root, "src/main/webapp/index.html"); // Internal browser must be opened within UI thread Display.getDefault().syncExec(new Runnable() { @Override public void run() { try { IWebBrowser browser = BROWSER_SUPPORT.createBrowser( IWorkbenchBrowserSupport.AS_EDITOR, null, null, null); browser.openURL(indexHtml.toURI().toURL()); } catch (Exception e) { ElasticBeanstalkPlugin .getDefault() .logError( "Failed to open project index page in Eclipse editor.", e); } } }); } } catch (Exception e) { throw new InvocationTargetException(e); } finally { progressMonitor.done(); } } private void addSessionManagerConfigurationFiles(IProject project) throws IOException, CoreException { Bundle bundle = ElasticBeanstalkPlugin.getDefault().getBundle(); URL url = FileLocator.resolve(bundle.getEntry("/")); IPath templateRoot = new Path(url.getFile(), "templates"); FileUtils.copyDirectory( templateRoot.append("dynamodb-session-manager").toFile(), project.getLocation().toFile()); // Add the user's credentials to context.xml File localContextXml = project.getLocation() .append(".ebextensions") .append("context.xml").toFile(); AccountInfo accountInfo = AwsToolkitCore.getDefault().getAccountManager().getAccountInfo(dataModel.getAccountId()); String contextContents = FileUtils.readFileToString(localContextXml); contextContents = contextContents.replace("{ACCESS_KEY}", accountInfo.getAccessKey()); contextContents = contextContents.replace("{SECRET_KEY}", accountInfo.getSecretKey()); FileUtils.writeStringToFile(localContextXml, contextContents); project.refreshLocal(IResource.DEPTH_INFINITE, null); // Update the J2EE Deployment Assembly by creating a link from the '/.ebextensions' // folder to the '/WEB-INF/.ebextensions' folder in the web assembly mapping for WTP IVirtualComponent rootComponent = ComponentCore.createComponent(project); IVirtualFolder rootFolder = rootComponent.getRootFolder(); try { Path source = new 
Path("/.ebextensions"); Path target = new Path("/WEB-INF/.ebextensions"); IVirtualFolder subFolder = rootFolder.getFolder(target); subFolder.createLink(source, 0, null); } catch( CoreException ce ) { String message = "Unable to configure deployment assembly to map .ebextension directory"; ElasticBeanstalkPlugin.getDefault().logError(message, ce); } } private IProject createBeanstalkProject(MavenConfigurationDataModel mavenConfig, IProgressMonitor monitor) throws CoreException, IOException { List<IProject> projects = MavenFactory.createArchetypeProject( "org.apache.maven.archetypes", "maven-archetype-webapp", "1.0", mavenConfig.getGroupId(), mavenConfig.getArtifactId(), mavenConfig.getVersion(), mavenConfig.getPackageName(), monitor); // This archetype only has one project return projects.get(0); } private void addTemplateFiles(IProject project) throws IOException, CoreException { final String CREDENTIAL_PROFILE_PLACEHOLDER = "{CREDENTIAL_PROFILE}"; final String PACKAGE_NAME_PLACEHOLDER = "{PACKAGE_NAME}"; Bundle bundle = ElasticBeanstalkPlugin.getDefault().getBundle(); File templateRoot = null; try { templateRoot = BundleUtils.getFileFromBundle(bundle, "templates"); } catch (URISyntaxException e) { throw new RuntimeException("Failed to load templates from ElasticBeanstalk bundle.", e); } AccountInfo currentAccountInfo = AwsToolkitCore.getDefault().getAccountManager().getAccountInfo(dataModel.getAccountId()); File pomFile = project.getFile("pom.xml").getLocation().toFile(); MavenConfigurationDataModel mavenConfig = dataModel.getMavenConfigurationDataModel(); switch (dataModel.getProjectTemplate()) { case WORKER: replacePomFile(new File(templateRoot, "worker/pom.xml"), mavenConfig.getGroupId(), mavenConfig.getArtifactId(), mavenConfig.getVersion(), pomFile); String packageName = dataModel.getMavenConfigurationDataModel().getPackageName(); JavaPackageName javaPackageName = JavaPackageName.parse(packageName); IPath location = 
project.getFile(MavenFactory.getMavenSourceFolder()).getLocation(); for (String component : javaPackageName.getComponents()) { location = location.append(component); } FileUtils.copyDirectory(new File(templateRoot, "worker/src"), location.toFile()); File workerServlet = location.append("WorkerServlet.java").toFile(); replaceStringInFile(workerServlet, CREDENTIAL_PROFILE_PLACEHOLDER, currentAccountInfo.getAccountName()); replaceStringInFile(workerServlet, PACKAGE_NAME_PLACEHOLDER, packageName); File workerRequest = location.append("WorkRequest.java").toFile(); replaceStringInFile(workerRequest, PACKAGE_NAME_PLACEHOLDER, packageName); location = project.getFile("src/main/webapp").getLocation(); FileUtils.copyDirectory( new File(templateRoot, "worker/WebContent/"), location.toFile()); File webXml = location.append("WEB-INF/web.xml").toFile(); replaceStringInFile(webXml, PACKAGE_NAME_PLACEHOLDER, packageName); break; case DEFAULT: replacePomFile(new File(templateRoot, "basic/pom.xml"), mavenConfig.getGroupId(), mavenConfig.getArtifactId(), mavenConfig.getVersion(), pomFile); location = project.getFile("src/main/webapp").getLocation(); FileUtils.copyDirectory( new File(templateRoot, "basic/WebContent"), location.toFile()); File indexJsp = location.append("index.jsp").toFile(); replaceStringInFile(indexJsp, CREDENTIAL_PROFILE_PLACEHOLDER, currentAccountInfo.getAccountName()); break; default: throw new IllegalStateException("Unknown project template: " + dataModel.getProjectTemplate()); } project.refreshLocal(IResource.DEPTH_INFINITE, null); } private String replacePomFile(File pomTemplate, String groupId, String artifactId, String version, File targetFile) throws IOException { final String GROUP_ID_PLACEHOLDER = "{GROUP_ID}"; final String ARTIFACT_ID_PLACEHOLDER = "{ARTIFACT_ID}"; final String VERSION_PLACEHOLDER = "{VERSION}"; String content = FileUtils.readFileToString(pomTemplate); content = content.replace(GROUP_ID_PLACEHOLDER, groupId) 
.replace(ARTIFACT_ID_PLACEHOLDER, artifactId) .replace(VERSION_PLACEHOLDER, version); FileUtils.writeStringToFile(targetFile, content); return content; } /** Replace source strings with target string and return the original content of the file. */ private String replaceStringInFile(File file, String source, String target) throws IOException { String originalContent = FileUtils.readFileToString(file); String replacedContent = originalContent.replace(source, target); FileUtils.writeStringToFile(file, replacedContent); return originalContent; } }
/*
 * Copyright 2013 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.http;

import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.http.HttpHeaders.Names;
import io.netty.util.CharsetUtil;
import org.junit.Test;

import java.util.List;

import static org.hamcrest.CoreMatchers.*;
import static org.junit.Assert.*;

/**
 * Tests for {@link HttpResponseDecoder}: feeds raw wire bytes into an
 * {@link EmbeddedChannel} and asserts on the decoded message objects
 * (HttpResponse, HttpContent, LastHttpContent).
 *
 * NOTE(review): throughout, {@code ch.writeInbound(...)} returning false means
 * "bytes consumed but no complete message produced yet"; true means at least
 * one decoded message is ready to be read via {@code ch.readInbound()}.
 */
public class HttpResponseDecoderTest {

    /** Decodes a chunked response: 10 chunks of 64 bytes each, then the terminating 0-chunk. */
    @Test
    public void testResponseChunked() {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n",
                CharsetUtil.US_ASCII));
        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));

        byte[] data = new byte[64];
        for (int i = 0; i < data.length; i++) {
            data[i] = (byte) i;
        }

        for (int i = 0; i < 10; i++) {
            // The chunk-size line alone does not produce a message ...
            assertFalse(ch.writeInbound(Unpooled.copiedBuffer(Integer.toHexString(data.length) + "\r\n",
                    CharsetUtil.US_ASCII)));
            // ... but the chunk data does.
            assertTrue(ch.writeInbound(Unpooled.wrappedBuffer(data)));
            HttpContent content = (HttpContent) ch.readInbound();
            assertEquals(data.length, content.content().readableBytes());

            byte[] decodedData = new byte[data.length];
            content.content().readBytes(decodedData);
            assertArrayEquals(data, decodedData);
            content.release();

            // The CRLF that terminates the chunk produces nothing by itself.
            assertFalse(ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII)));
        }

        // Write the last chunk.
        ch.writeInbound(Unpooled.copiedBuffer("0\r\n\r\n", CharsetUtil.US_ASCII));

        // Ensure the last chunk was decoded.
        LastHttpContent content = (LastHttpContent) ch.readInbound();
        assertFalse(content.content().isReadable());
        content.release();

        ch.finish();
        assertNull(ch.readInbound());
    }

    /**
     * With maxChunkSize = 32, each 64-byte chunk must be split by the decoder
     * into two 32-byte HttpContent messages.
     */
    @Test
    public void testResponseChunkedExceedMaxChunkSize() {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder(4096, 8192, 32));
        ch.writeInbound(
                Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n",
                        CharsetUtil.US_ASCII));
        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));

        byte[] data = new byte[64];
        for (int i = 0; i < data.length; i++) {
            data[i] = (byte) i;
        }

        for (int i = 0; i < 10; i++) {
            assertFalse(ch.writeInbound(Unpooled.copiedBuffer(Integer.toHexString(data.length) + "\r\n",
                    CharsetUtil.US_ASCII)));
            assertTrue(ch.writeInbound(Unpooled.wrappedBuffer(data)));

            byte[] decodedData = new byte[data.length];
            // First half of the 64-byte chunk.
            HttpContent content = (HttpContent) ch.readInbound();
            assertEquals(32, content.content().readableBytes());
            content.content().readBytes(decodedData, 0, 32);
            content.release();

            // Second half.
            content = (HttpContent) ch.readInbound();
            assertEquals(32, content.content().readableBytes());
            content.content().readBytes(decodedData, 32, 32);
            assertArrayEquals(data, decodedData);
            content.release();

            assertFalse(ch.writeInbound(Unpooled.copiedBuffer("\r\n", CharsetUtil.US_ASCII)));
        }

        // Write the last chunk.
        ch.writeInbound(Unpooled.copiedBuffer("0\r\n\r\n", CharsetUtil.US_ASCII));

        // Ensure the last chunk was decoded.
        LastHttpContent content = (LastHttpContent) ch.readInbound();
        assertFalse(content.content().isReadable());
        content.release();

        ch.finish();
        assertNull(ch.readInbound());
    }

    /**
     * A response with neither Content-Length nor chunked encoding is terminated
     * by connection closure; closing with no body must still yield an empty
     * LastHttpContent.
     */
    @Test
    public void testClosureWithoutContentLength1() throws Exception {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n\r\n", CharsetUtil.US_ASCII));

        // Read the response headers.
        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));
        assertThat(ch.readInbound(), is(nullValue()));

        // Close the connection without sending anything.
        assertTrue(ch.finish());

        // The decoder should still produce the last content.
        LastHttpContent content = (LastHttpContent) ch.readInbound();
        assertThat(content.content().isReadable(), is(false));
        content.release();

        // But nothing more.
        assertThat(ch.readInbound(), is(nullValue()));
    }

    /** Same as above but with partial body bytes received before the closure. */
    @Test
    public void testClosureWithoutContentLength2() throws Exception {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());

        // Write the partial response.
        ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n\r\n12345678", CharsetUtil.US_ASCII));

        // Read the response headers.
        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));

        // Read the partial content.
        HttpContent content = (HttpContent) ch.readInbound();
        assertThat(content.content().toString(CharsetUtil.US_ASCII), is("12345678"));
        assertThat(content, is(not(instanceOf(LastHttpContent.class))));
        content.release();

        assertThat(ch.readInbound(), is(nullValue()));

        // Close the connection.
        assertTrue(ch.finish());

        // The decoder should still produce the last content.
        LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
        assertThat(lastContent.content().isReadable(), is(false));
        lastContent.release();

        // But nothing more.
        assertThat(ch.readInbound(), is(nullValue()));
    }

    /**
     * A chunked response closed before any chunk arrives is a protocol
     * violation: no synthetic LastHttpContent may be produced.
     */
    @Test
    public void testPrematureClosureWithChunkedEncoding1() throws Exception {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        ch.writeInbound(
                Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n",
                        CharsetUtil.US_ASCII));

        // Read the response headers.
        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));
        assertThat(res.headers().get(Names.TRANSFER_ENCODING), is("chunked"));
        assertThat(ch.readInbound(), is(nullValue()));

        // Close the connection without sending anything.
        ch.finish();

        // The decoder should not generate the last chunk because it's closed prematurely.
        assertThat(ch.readInbound(), is(nullValue()));
    }

    /** Premature closure mid-chunk: partial content is delivered, but no last chunk. */
    @Test
    public void testPrematureClosureWithChunkedEncoding2() throws Exception {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());

        // Write the partial response.
        ch.writeInbound(Unpooled.copiedBuffer(
                "HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n8\r\n12345678", CharsetUtil.US_ASCII));

        // Read the response headers.
        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));
        assertThat(res.headers().get(Names.TRANSFER_ENCODING), is("chunked"));

        // Read the partial content.
        HttpContent content = (HttpContent) ch.readInbound();
        assertThat(content.content().toString(CharsetUtil.US_ASCII), is("12345678"));
        assertThat(content, is(not(instanceOf(LastHttpContent.class))));
        content.release();

        assertThat(ch.readInbound(), is(nullValue()));

        // Close the connection.
        ch.finish();

        // The decoder should not generate the last chunk because it's closed prematurely.
        assertThat(ch.readInbound(), is(nullValue()));
    }

    /** Header-only response; closing the channel produces the empty last content. */
    @Test
    public void testLastResponseWithEmptyHeaderAndEmptyContent() {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n\r\n", CharsetUtil.US_ASCII));

        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));
        assertThat(ch.readInbound(), is(nullValue()));

        assertThat(ch.finish(), is(true));

        LastHttpContent content = (LastHttpContent) ch.readInbound();
        assertThat(content.content().isReadable(), is(false));
        content.release();

        assertThat(ch.readInbound(), is(nullValue()));
    }

    /** Without Content-Length, body bytes stream through as HttpContent until closure. */
    @Test
    public void testLastResponseWithoutContentLengthHeader() {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        ch.writeInbound(Unpooled.copiedBuffer("HTTP/1.1 200 OK\r\n\r\n", CharsetUtil.US_ASCII));

        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));
        assertThat(ch.readInbound(), is(nullValue()));

        ch.writeInbound(Unpooled.wrappedBuffer(new byte[1024]));
        HttpContent content = (HttpContent) ch.readInbound();
        assertThat(content.content().readableBytes(), is(1024));
        content.release();

        assertThat(ch.finish(), is(true));

        LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
        assertThat(lastContent.content().isReadable(), is(false));
        lastContent.release();

        assertThat(ch.readInbound(), is(nullValue()));
    }

    /** Trailing headers after the 0-chunk end up on LastHttpContent.trailingHeaders(). */
    @Test
    public void testLastResponseWithTrailingHeader() {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        ch.writeInbound(Unpooled.copiedBuffer(
                "HTTP/1.1 200 OK\r\n" +
                "Transfer-Encoding: chunked\r\n" +
                "\r\n" +
                "0\r\n" +
                "Set-Cookie: t1=t1v1\r\n" +
                "Set-Cookie: t2=t2v2; Expires=Wed, 09-Jun-2021 10:18:14 GMT\r\n" +
                "\r\n",
                CharsetUtil.US_ASCII));

        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));

        LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
        assertThat(lastContent.content().isReadable(), is(false));
        HttpHeaders headers = lastContent.trailingHeaders();
        assertEquals(1, headers.names().size());
        List<String> values = headers.getAll("Set-Cookie");
        assertEquals(2, values.size());
        assertTrue(values.contains("t1=t1v1"));
        assertTrue(values.contains("t2=t2v2; Expires=Wed, 09-Jun-2021 10:18:14 GMT"));
        lastContent.release();

        assertThat(ch.finish(), is(false));
        assertThat(ch.readInbound(), is(nullValue()));
    }

    /** Replays the trailing-header response at every possible fragmentation boundary. */
    @Test
    public void testLastResponseWithTrailingHeaderFragmented() {
        byte[] data = ("HTTP/1.1 200 OK\r\n" +
                "Transfer-Encoding: chunked\r\n" +
                "\r\n" +
                "0\r\n" +
                "Set-Cookie: t1=t1v1\r\n" +
                "Set-Cookie: t2=t2v2; Expires=Wed, 09-Jun-2021 10:18:14 GMT\r\n" +
                "\r\n").getBytes(CharsetUtil.US_ASCII);

        for (int i = 1; i < data.length; i++) {
            testLastResponseWithTrailingHeaderFragmented(data, i);
        }
    }

    private static void testLastResponseWithTrailingHeaderFragmented(byte[] content, int fragmentSize) {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        // 47 = length of the status line + "Transfer-Encoding: chunked" header + the
        // blank line that ends the header block ("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n").
        int headerLength = 47;
        // split up the header
        for (int a = 0; a < headerLength;) {
            int amount = fragmentSize;
            if (a + amount > headerLength) {
                amount = headerLength - a;
            }

            // if header is done it should produce a HttpRequest
            boolean headerDone = a + amount == headerLength;
            assertEquals(headerDone, ch.writeInbound(Unpooled.wrappedBuffer(content, a, amount)));
            a += amount;
        }

        ch.writeInbound(Unpooled.wrappedBuffer(content, headerLength, content.length - headerLength));
        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));

        LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
        assertThat(lastContent.content().isReadable(), is(false));
        HttpHeaders headers = lastContent.trailingHeaders();
        assertEquals(1, headers.names().size());
        List<String> values = headers.getAll("Set-Cookie");
        assertEquals(2, values.size());
        assertTrue(values.contains("t1=t1v1"));
        assertTrue(values.contains("t2=t2v2; Expires=Wed, 09-Jun-2021 10:18:14 GMT"));
        lastContent.release();

        assertThat(ch.finish(), is(false));
        assertThat(ch.readInbound(), is(nullValue()));
    }

    /** Content-Length-delimited body split into two writes of 5 bytes each. */
    @Test
    public void testResponseWithContentLength() {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        ch.writeInbound(Unpooled.copiedBuffer(
                "HTTP/1.1 200 OK\r\n" +
                "Content-Length: 10\r\n" +
                "\r\n", CharsetUtil.US_ASCII));

        byte[] data = new byte[10];
        for (int i = 0; i < data.length; i++) {
            data[i] = (byte) i;
        }
        ch.writeInbound(Unpooled.wrappedBuffer(data, 0, data.length / 2));
        ch.writeInbound(Unpooled.wrappedBuffer(data, 5, data.length / 2));

        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));

        HttpContent firstContent = (HttpContent) ch.readInbound();
        assertThat(firstContent.content().readableBytes(), is(5));
        assertEquals(Unpooled.wrappedBuffer(data, 0, 5), firstContent.content());
        firstContent.release();

        // The second half completes the declared length, so it arrives as the last content.
        LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
        assertEquals(5, lastContent.content().readableBytes());
        assertEquals(Unpooled.wrappedBuffer(data, 5, 5), lastContent.content());
        lastContent.release();

        assertThat(ch.finish(), is(false));
        assertThat(ch.readInbound(), is(nullValue()));
    }

    /** Replays the Content-Length response header at every fragmentation boundary. */
    @Test
    public void testResponseWithContentLengthFragmented() {
        byte[] data = ("HTTP/1.1 200 OK\r\n" +
                "Content-Length: 10\r\n" +
                "\r\n").getBytes(CharsetUtil.US_ASCII);

        for (int i = 1; i < data.length; i++) {
            testResponseWithContentLengthFragmented(data, i);
        }
    }

    private static void testResponseWithContentLengthFragmented(byte[] header, int fragmentSize) {
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        // split up the header
        for (int a = 0; a < header.length;) {
            int amount = fragmentSize;
            if (a + amount > header.length) {
                amount = header.length - a;
            }
            ch.writeInbound(Unpooled.wrappedBuffer(header, a, amount));
            a += amount;
        }
        byte[] data = new byte[10];
        for (int i = 0; i < data.length; i++) {
            data[i] = (byte) i;
        }
        ch.writeInbound(Unpooled.wrappedBuffer(data, 0, data.length / 2));
        ch.writeInbound(Unpooled.wrappedBuffer(data, 5, data.length / 2));

        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.OK));

        HttpContent firstContent = (HttpContent) ch.readInbound();
        assertThat(firstContent.content().readableBytes(), is(5));
        assertEquals(Unpooled.wrappedBuffer(data, 0, 5), firstContent.content());
        firstContent.release();

        LastHttpContent lastContent = (LastHttpContent) ch.readInbound();
        assertEquals(5, lastContent.content().readableBytes());
        assertEquals(Unpooled.wrappedBuffer(data, 5, 5), lastContent.content());
        lastContent.release();

        assertThat(ch.finish(), is(false));
        assertThat(ch.readInbound(), is(nullValue()));
    }

    /** A 101 Switching Protocols handshake followed by 16 bytes of WebSocket frame data. */
    @Test
    public void testWebSocketResponse() {
        byte[] data = ("HTTP/1.1 101 WebSocket Protocol Handshake\r\n" +
                "Upgrade: WebSocket\r\n" +
                "Connection: Upgrade\r\n" +
                "Sec-WebSocket-Origin: http://localhost:8080\r\n" +
                "Sec-WebSocket-Location: ws://localhost/some/path\r\n" +
                "\r\n" +
                "1234567812345678").getBytes();
        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        ch.writeInbound(Unpooled.wrappedBuffer(data));

        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.SWITCHING_PROTOCOLS));
        HttpContent content = (HttpContent) ch.readInbound();
        assertThat(content.content().readableBytes(), is(16));
        content.release();

        assertThat(ch.finish(), is(false));

        assertThat(ch.readInbound(), is(nullValue()));
    }

    // See https://github.com/netty/netty/issues/2173
    @Test
    public void testWebSocketResponseWithDataFollowing() {
        byte[] data = ("HTTP/1.1 101 WebSocket Protocol Handshake\r\n" +
                "Upgrade: WebSocket\r\n" +
                "Connection: Upgrade\r\n" +
                "Sec-WebSocket-Origin: http://localhost:8080\r\n" +
                "Sec-WebSocket-Location: ws://localhost/some/path\r\n" +
                "\r\n" +
                "1234567812345678").getBytes();
        byte[] otherData = {1, 2, 3, 4};

        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());
        ch.writeInbound(Unpooled.wrappedBuffer(data, otherData));

        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_1));
        assertThat(res.getStatus(), is(HttpResponseStatus.SWITCHING_PROTOCOLS));
        HttpContent content = (HttpContent) ch.readInbound();
        assertThat(content.content().readableBytes(), is(16));
        content.release();

        assertThat(ch.finish(), is(true));

        // Bytes past the upgrade body pass through undecoded.
        assertEquals(ch.readInbound(), Unpooled.wrappedBuffer(otherData));
    }

    @Test
    public void testGarbageHeaders() {
        // A response without headers - from https://github.com/netty/netty/issues/2103
        byte[] data = ("<html>\r\n" +
                "<head><title>400 Bad Request</title></head>\r\n" +
                "<body bgcolor=\"white\">\r\n" +
                "<center><h1>400 Bad Request</h1></center>\r\n" +
                "<hr><center>nginx/1.1.19</center>\r\n" +
                "</body>\r\n" +
                "</html>\r\n").getBytes();

        EmbeddedChannel ch = new EmbeddedChannel(new HttpResponseDecoder());

        ch.writeInbound(Unpooled.wrappedBuffer(data));

        // Garbage input should generate the 999 Unknown response.
        HttpResponse res = (HttpResponse) ch.readInbound();
        assertThat(res.getProtocolVersion(), sameInstance(HttpVersion.HTTP_1_0));
        assertThat(res.getStatus().code(), is(999));
        assertThat(res.getDecoderResult().isFailure(), is(true));
        assertThat(res.getDecoderResult().isFinished(), is(true));
        assertThat(ch.readInbound(), is(nullValue()));

        // More garbage should not generate anything (i.e. the decoder discards anything beyond this point.)
        ch.writeInbound(Unpooled.wrappedBuffer(data));
        assertThat(ch.readInbound(), is(nullValue()));

        // Closing the connection should not generate anything since the protocol has been violated.
        ch.finish();
        assertThat(ch.readInbound(), is(nullValue()));
    }

    /**
     * Tests if the decoder produces one and only {@link LastHttpContent} when an invalid chunk is received and
     * the connection is closed.
     */
    @Test
    public void testGarbageChunk() {
        EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
        String responseWithIllegalChunk =
                "HTTP/1.1 200 OK\r\n" +
                "Transfer-Encoding: chunked\r\n\r\n" +
                "NOT_A_CHUNK_LENGTH\r\n";

        channel.writeInbound(Unpooled.copiedBuffer(responseWithIllegalChunk, CharsetUtil.US_ASCII));
        assertThat(channel.readInbound(), is(instanceOf(HttpResponse.class)));

        // Ensure that the decoder generates the last chunk with correct decoder result.
        LastHttpContent invalidChunk = (LastHttpContent) channel.readInbound();
        assertThat(invalidChunk.getDecoderResult().isFailure(), is(true));
        invalidChunk.release();

        // And no more messages should be produced by the decoder.
        assertThat(channel.readInbound(), is(nullValue()));

        // .. even after the connection is closed.
        assertThat(channel.finish(), is(false));
    }
}
package cl.json;

import android.app.Application;
import android.content.ContentUris;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import androidx.annotation.NonNull;
import androidx.loader.content.CursorLoader;
import androidx.core.content.FileProvider;
import android.text.TextUtils;
import com.facebook.react.bridge.ReactContext;

import java.io.File;
import java.util.ArrayList;

/**
 * Static helpers for converting between {@link File} objects, {@code content://}
 * document Uris and real filesystem paths when sharing files from React Native.
 */
public class RNSharePathUtil {

    // Candidate FileProvider authorities, populated lazily on first use.
    private static final ArrayList<String> authorities = new ArrayList<>();

    /**
     * Collects the FileProvider authorities to try: the application-supplied one
     * (when the app implements {@link ShareApplication}) first, then the default
     * {@code <package>.rnshare.fileprovider} authority declared by this library.
     * Idempotent: only fills the list once.
     */
    public static void compileAuthorities(ReactContext reactContext) {
        if (authorities.isEmpty()) {
            Application application = (Application) reactContext.getApplicationContext();
            if (application instanceof ShareApplication) {
                authorities.add(((ShareApplication) application).getFileProviderAuthority());
            }
            authorities.add(reactContext.getPackageName() + ".rnshare.fileprovider");
        }
    }

    /**
     * Returns a shareable {@link Uri} for {@code file}, preferring a
     * {@code content://} Uri minted by one of the known FileProvider authorities.
     * Falls back to {@code Uri.fromFile(file)} when the file's Uri already
     * carries a known authority or looks like a content Uri.
     *
     * @return a FileProvider content Uri, or {@code null} if every authority failed.
     */
    public static Uri compatUriFromFile(@NonNull final ReactContext context, @NonNull final File file) {
        compileAuthorities(context);
        String existingAuthority = Uri.fromFile(file).getAuthority();

        // Authority is already set on this uri, no need to set it again
        if (!TextUtils.isEmpty(existingAuthority) && authorities.contains(existingAuthority)) {
            return Uri.fromFile(file);
        }

        // Already a content uri, cannot set authority on this.
        // BUG FIX: the original tested getAbsolutePath(), which prefixes the working
        // directory (and File collapses "//" to "/"), so the guard could never fire.
        // Checking the scheme prefix on getPath() makes the intended branch reachable.
        // NOTE(review): Uri.fromFile() on such a path still yields a file:// Uri, as
        // in the original code — callers relying on this fallback should verify it.
        if (file.getPath().startsWith("content:")) {
            return Uri.fromFile(file);
        }

        // No authority present, getting FileProvider uri: try each known authority
        // in order until one succeeds.
        Uri result = null;
        for (int i = 0; i < authorities.size(); i++) {
            try {
                String authority = authorities.get(i);
                result = FileProvider.getUriForFile(context, authority, file);
                if (result != null) {
                    break;
                }
            } catch (Exception e) {
                // Best-effort: an authority not declared in the manifest throws; try the next.
                System.out.println("RNSharePathUtil::compatUriFromFile ERROR " + e.getMessage());
            }
        }

        return result;
    }

    /**
     * Resolves a content/document/file {@link Uri} to a real filesystem path.
     *
     * @param context            the Android context
     * @param uri                the Uri to resolve
     * @param useInternalStorage when true, primary external-storage documents are
     *                           mapped into the internal cache dir instead of the
     *                           external cache dir
     * @return the resolved path, or {@code null} when the Uri cannot be resolved
     */
    public static String getRealPathFromURI(final Context context, final Uri uri, Boolean useInternalStorage) {
        String filePrefix = "";

        // DocumentProvider (KitKat+ storage-access-framework Uris)
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT
                && DocumentsContract.isDocumentUri(context, uri)) {
            // ExternalStorageProvider
            if (isExternalStorageDocument(uri)) {
                // Document id has the form "<volume>:<relative-path>".
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];

                if ("primary".equalsIgnoreCase(type) || "0".equalsIgnoreCase(type)) {
                    File cacheDir = useInternalStorage ? context.getCacheDir() : context.getExternalCacheDir();
                    return filePrefix + cacheDir + "/" + split[1];
                } else if ("raw".equalsIgnoreCase(type)) {
                    // "raw:" ids already contain the absolute path.
                    return filePrefix + split[1];
                } else if (!TextUtils.isEmpty(type)) {
                    // Secondary volumes (SD cards) are mounted under /storage/<volume>.
                    return filePrefix + "/storage/" + type + "/" + split[1];
                }
                // TODO handle non-primary volumes
            }
            // DownloadsProvider
            else if (isDownloadsDocument(uri)) {
                final String id = DocumentsContract.getDocumentId(uri);
                if (id.startsWith("raw:")) {
                    return filePrefix + id.replaceFirst("raw:", "");
                }
                final Uri contentUri = ContentUris.withAppendedId(
                        Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
                return filePrefix + getDataColumn(context, contentUri, null, null);
            }
            // MediaProvider
            else if (isMediaDocument(uri)) {
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];

                Uri contentUri = null;
                if ("image".equals(type)) {
                    contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
                } else if ("video".equals(type)) {
                    contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
                } else if ("audio".equals(type)) {
                    contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
                } else if ("raw".equalsIgnoreCase(type)) {
                    return filePrefix + split[1];
                }

                // Query the media store for the _data column of this one row.
                final String selection = "_id=?";
                final String[] selectionArgs = new String[]{ split[1] };
                return filePrefix + getDataColumn(context, contentUri, selection, selectionArgs);
            }
        }
        // MediaStore (and general)
        else if ("content".equalsIgnoreCase(uri.getScheme())) {
            // Return the remote address
            if (isGooglePhotosUri(uri)) {
                return uri.getLastPathSegment();
            }
            return filePrefix + getDataColumn(context, uri, null, null);
        }
        // File
        else if ("file".equalsIgnoreCase(uri.getScheme())) {
            return uri.getPath();
        }

        return null;
    }

    /**
     * Get the value of the data column for this Uri. This is useful for
     * MediaStore Uris, and other file-based ContentProviders.
     *
     * @param context       The context.
     * @param uri           The Uri to query.
     * @param selection     (Optional) Filter used in the query.
     * @param selectionArgs (Optional) Selection arguments used in the query.
     * @return The value of the _data column, which is typically a file path,
     *         or {@code null} if the query returned no row.
     */
    public static String getDataColumn(Context context, Uri uri, String selection, String[] selectionArgs) {
        Cursor cursor = null;
        final String column = MediaStore.MediaColumns.DATA;
        final String[] projection = { column };

        try {
            CursorLoader loader = new CursorLoader(context, uri, projection, selection, selectionArgs, null);
            cursor = loader.loadInBackground();
            if (cursor != null && cursor.moveToFirst()) {
                final int index = cursor.getColumnIndexOrThrow(column);
                return cursor.getString(index);
            }
        } finally {
            // Always release the cursor, even if getColumnIndexOrThrow throws.
            if (cursor != null) {
                cursor.close();
            }
        }
        return null;
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is ExternalStorageProvider.
     */
    public static boolean isExternalStorageDocument(Uri uri) {
        return "com.android.externalstorage.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is DownloadsProvider.
     */
    public static boolean isDownloadsDocument(Uri uri) {
        return "com.android.providers.downloads.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is MediaProvider.
     */
    public static boolean isMediaDocument(Uri uri) {
        return "com.android.providers.media.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is Google Photos.
     */
    public static boolean isGooglePhotosUri(Uri uri) {
        return "com.google.android.apps.photos.content".equals(uri.getAuthority());
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.resourcemanager.applicationsmanager;

import java.security.PrivilegedExceptionAction;
import java.util.List;

import org.junit.Assert;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.ApplicationMasterService;
import org.apache.hadoop.yarn.server.resourcemanager.MockAM;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Verifies that the AM-RM allocate RPC reports node-state changes
 * (UNHEALTHY / LOST / RUNNING) to Application Masters, and that updates are
 * delivered as deltas keyed on the allocate-protocol response id.
 */
public class TestAMRMRPCNodeUpdates {
    private MockRM rm;
    private ApplicationMasterService amService;

    @Before
    public void setUp() {
        this.rm = new MockRM() {
            @Override
            public void init(Configuration conf) {
                // Allow AMs to use the whole cluster so every submitted app can launch.
                conf.set(
                        CapacitySchedulerConfiguration.MAXIMUM_APPLICATION_MASTERS_RESOURCE_PERCENT,
                        "1.0");
                super.init(conf);
            }
        };
        rm.start();
        amService = rm.getApplicationMasterService();
    }

    @After
    public void tearDown() {
        if (rm != null) {
            this.rm.stop();
        }
    }

    /** Heartbeats {@code nm} with the given health flag and waits for the RM to process it. */
    private void syncNodeHeartbeat(MockNM nm, boolean health) throws Exception {
        nm.nodeHeartbeat(health);
        rm.drainEvents();
    }

    /** Drives {@code nm} to RUNNING and then marks it LOST, waiting for the RM to process it. */
    private void syncNodeLost(MockNM nm) throws Exception {
        rm.sendNodeStarted(nm);
        rm.waitForState(nm.getNodeId(), NodeState.RUNNING);
        rm.sendNodeLost(nm);
        rm.drainEvents();
    }

    /**
     * Issues an allocate call as the attempt's own user, carrying the attempt's
     * AMRM token — required because the AM service authenticates callers.
     */
    private AllocateResponse allocate(final ApplicationAttemptId attemptId,
            final AllocateRequest req) throws Exception {
        UserGroupInformation ugi =
                UserGroupInformation.createRemoteUser(attemptId.toString());
        Token<AMRMTokenIdentifier> token =
                rm.getRMContext().getRMApps().get(attemptId.getApplicationId())
                        .getRMAppAttempt(attemptId).getAMRMToken();
        ugi.addTokenIdentifier(token.decodeIdentifier());
        return ugi.doAs(new PrivilegedExceptionAction<AllocateResponse>() {
            @Override
            public AllocateResponse run() throws Exception {
                return amService.allocate(req);
            }
        });
    }

    @Test
    public void testAMRMUnusableNodes() throws Exception {
        MockNM nm1 = rm.registerNode("127.0.0.1:1234", 10000);
        MockNM nm2 = rm.registerNode("127.0.0.2:1234", 10000);
        MockNM nm3 = rm.registerNode("127.0.0.3:1234", 10000);
        MockNM nm4 = rm.registerNode("127.0.0.4:1234", 10000);
        rm.drainEvents();

        RMApp app1 = rm.submitApp(2000);

        // Trigger the scheduling so the AM gets 'launched' on nm1
        nm1.nodeHeartbeat(true);

        RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
        MockAM am1 = rm.sendAMLaunched(attempt1.getAppAttemptId());

        // register AM returns no unusable node
        am1.registerAppAttempt();

        // allocate request returns no updated node
        AllocateRequest allocateRequest1 =
                AllocateRequest.newInstance(0, 0F, null, null, null);
        AllocateResponse response1 =
                allocate(attempt1.getAppAttemptId(), allocateRequest1);
        List<NodeReport> updatedNodes = response1.getUpdatedNodes();
        Assert.assertEquals(0, updatedNodes.size());

        syncNodeHeartbeat(nm4, false);

        // allocate request returns updated node
        allocateRequest1 =
                AllocateRequest.newInstance(response1.getResponseId(), 0F, null, null, null);
        response1 = allocate(attempt1.getAppAttemptId(), allocateRequest1);
        updatedNodes = response1.getUpdatedNodes();
        Assert.assertEquals(1, updatedNodes.size());
        NodeReport nr = updatedNodes.iterator().next();
        Assert.assertEquals(nm4.getNodeId(), nr.getNodeId());
        Assert.assertEquals(NodeState.UNHEALTHY, nr.getNodeState());

        // resending the allocate request returns the same result
        // (same response id => RM treats it as a retransmission)
        response1 = allocate(attempt1.getAppAttemptId(), allocateRequest1);
        updatedNodes = response1.getUpdatedNodes();
        Assert.assertEquals(1, updatedNodes.size());
        nr = updatedNodes.iterator().next();
        Assert.assertEquals(nm4.getNodeId(), nr.getNodeId());
        Assert.assertEquals(NodeState.UNHEALTHY, nr.getNodeState());

        syncNodeLost(nm3);

        // subsequent allocate request returns delta
        allocateRequest1 =
                AllocateRequest.newInstance(response1.getResponseId(), 0F, null, null, null);
        response1 = allocate(attempt1.getAppAttemptId(), allocateRequest1);
        updatedNodes = response1.getUpdatedNodes();
        Assert.assertEquals(1, updatedNodes.size());
        nr = updatedNodes.iterator().next();
        Assert.assertEquals(nm3.getNodeId(), nr.getNodeId());
        Assert.assertEquals(NodeState.LOST, nr.getNodeState());

        // registering another AM gives it the complete failed list
        RMApp app2 = rm.submitApp(2000);

        // Trigger nm2 heartbeat so that AM gets launched on it
        nm2.nodeHeartbeat(true);
        RMAppAttempt attempt2 = app2.getCurrentAppAttempt();
        MockAM am2 = rm.sendAMLaunched(attempt2.getAppAttemptId());

        // register AM returns all unusable nodes
        am2.registerAppAttempt();

        // allocate request returns no updated node
        AllocateRequest allocateRequest2 =
                AllocateRequest.newInstance(0, 0F, null, null, null);
        AllocateResponse response2 =
                allocate(attempt2.getAppAttemptId(), allocateRequest2);
        updatedNodes = response2.getUpdatedNodes();
        Assert.assertEquals(0, updatedNodes.size());

        syncNodeHeartbeat(nm4, true);

        // both AM's should get delta updated nodes
        allocateRequest1 =
                AllocateRequest.newInstance(response1.getResponseId(), 0F, null, null, null);
        response1 = allocate(attempt1.getAppAttemptId(), allocateRequest1);
        updatedNodes = response1.getUpdatedNodes();
        Assert.assertEquals(1, updatedNodes.size());
        nr = updatedNodes.iterator().next();
        Assert.assertEquals(nm4.getNodeId(), nr.getNodeId());
        Assert.assertEquals(NodeState.RUNNING, nr.getNodeState());

        allocateRequest2 =
                AllocateRequest.newInstance(response2.getResponseId(), 0F, null, null, null);
        response2 = allocate(attempt2.getAppAttemptId(), allocateRequest2);
        updatedNodes = response2.getUpdatedNodes();
        Assert.assertEquals(1, updatedNodes.size());
        nr = updatedNodes.iterator().next();
        Assert.assertEquals(nm4.getNodeId(), nr.getNodeId());
        Assert.assertEquals(NodeState.RUNNING, nr.getNodeState());

        // subsequent allocate calls should return no updated nodes
        allocateRequest2 =
                AllocateRequest.newInstance(response2.getResponseId(), 0F, null, null, null);
        response2 = allocate(attempt2.getAppAttemptId(), allocateRequest2);
        updatedNodes = response2.getUpdatedNodes();
        Assert.assertEquals(0, updatedNodes.size());

        // how to do the above for LOST node
    }
}
package com.github.scribejava.core.oauth;

import java.io.IOException;
import java.util.concurrent.Future;
import com.github.scribejava.core.builder.api.DefaultApi20;
import com.github.scribejava.core.extractors.OAuth2AccessTokenJsonExtractor;
import com.github.scribejava.core.httpclient.HttpClient;
import com.github.scribejava.core.httpclient.HttpClientConfig;
import com.github.scribejava.core.model.OAuth2AccessToken;
import com.github.scribejava.core.model.OAuth2Authorization;
import com.github.scribejava.core.model.OAuthAsyncRequestCallback;
import com.github.scribejava.core.model.OAuthConstants;
import com.github.scribejava.core.model.OAuthRequest;
import com.github.scribejava.core.model.Response;
import com.github.scribejava.core.model.Verb;
import com.github.scribejava.core.pkce.PKCE;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import com.github.scribejava.core.revoke.TokenTypeHint;

/**
 * OAuth 2.0 implementation of {@link OAuthService}. Supports the authorization-code,
 * refresh-token, resource-owner-password and client-credentials grants, plus token
 * revocation and authorization-URL construction.
 * <p>
 * Most public methods come in sync/async overload pairs; the async overloads delegate
 * to the same request-building helpers and hand the request to the configured HTTP client.
 */
public class OAuth20Service extends OAuthService {

    private static final String VERSION = "2.0";

    // API descriptor supplying endpoints, verbs, extractors and client-auth scheme.
    private final DefaultApi20 api;
    // "response_type" used when building authorization URLs (e.g. "code").
    private final String responseType;
    // Scope applied whenever a call-site does not pass an explicit scope; may be null.
    private final String defaultScope;

    /**
     * @param api              provider-specific API descriptor
     * @param apiKey           client id
     * @param apiSecret        client secret
     * @param callback         redirect URI registered with the provider; may be null
     * @param defaultScope     scope used when none is supplied per-request; may be null
     * @param responseType     OAuth2 response type for authorization URLs
     * @param userAgent        User-Agent header value; may be null
     * @param httpClientConfig config for the lazily-created HTTP client; may be null
     * @param httpClient       explicit HTTP client; may be null
     */
    public OAuth20Service(DefaultApi20 api, String apiKey, String apiSecret, String callback, String defaultScope,
            String responseType, String userAgent, HttpClientConfig httpClientConfig, HttpClient httpClient) {
        super(apiKey, apiSecret, callback, userAgent, httpClientConfig, httpClient);
        this.responseType = responseType;
        this.api = api;
        this.defaultScope = defaultScope;
    }

    //protected to facilitate mocking
    protected OAuth2AccessToken sendAccessTokenRequestSync(OAuthRequest request)
            throws IOException, InterruptedException, ExecutionException {
        // Executes synchronously and parses the token from the raw HTTP response.
        return api.getAccessTokenExtractor().extract(execute(request));
    }

    //protected to facilitate mocking
    protected Future<OAuth2AccessToken> sendAccessTokenRequestAsync(OAuthRequest request) {
        return sendAccessTokenRequestAsync(request, null);
    }

    //protected to facilitate mocking
    protected Future<OAuth2AccessToken> sendAccessTokenRequestAsync(OAuthRequest request,
            OAuthAsyncRequestCallback<OAuth2AccessToken> callback) {
        // The converter runs once the HTTP response arrives and turns it into a token.
        return execute(request, callback, new OAuthRequest.ResponseConverter<OAuth2AccessToken>() {
            @Override
            public OAuth2AccessToken convert(Response response) throws IOException {
                return getApi().getAccessTokenExtractor().extract(response);
            }
        });
    }

    public Future<OAuth2AccessToken> getAccessTokenAsync(String code) {
        return getAccessToken(AccessTokenRequestParams.create(code), null);
    }

    public Future<OAuth2AccessToken> getAccessTokenAsync(AccessTokenRequestParams params) {
        return getAccessToken(params, null);
    }

    /**
     * Exchanges an authorization code for an access token (blocking).
     *
     * @param code authorization code returned by the provider
     * @return the access token
     * @throws IOException          network/parsing failure
     * @throws InterruptedException if the call is interrupted
     * @throws ExecutionException   if the underlying async execution fails
     */
    public OAuth2AccessToken getAccessToken(String code)
            throws IOException, InterruptedException, ExecutionException {
        return getAccessToken(AccessTokenRequestParams.create(code));
    }

    public OAuth2AccessToken getAccessToken(AccessTokenRequestParams params)
            throws IOException, InterruptedException, ExecutionException {
        return sendAccessTokenRequestSync(createAccessTokenRequest(params));
    }

    /**
     * Start the request to retrieve the access token. The optionally provided callback will be called with the Token
     * when it is available.
     *
     * @param params params
     * @param callback optional callback
     * @return Future
     */
    public Future<OAuth2AccessToken> getAccessToken(AccessTokenRequestParams params,
            OAuthAsyncRequestCallback<OAuth2AccessToken> callback) {
        return sendAccessTokenRequestAsync(createAccessTokenRequest(params), callback);
    }

    public Future<OAuth2AccessToken> getAccessToken(String code,
            OAuthAsyncRequestCallback<OAuth2AccessToken> callback) {
        return getAccessToken(AccessTokenRequestParams.create(code), callback);
    }

    /**
     * Builds the authorization-code token request: client auth, code, redirect_uri
     * (when a callback is configured), scope (explicit, else default), grant_type
     * and the optional PKCE code verifier.
     */
    protected OAuthRequest createAccessTokenRequest(AccessTokenRequestParams params) {
        final OAuthRequest request = new OAuthRequest(api.getAccessTokenVerb(), api.getAccessTokenEndpoint());

        api.getClientAuthentication().addClientAuthentication(request, getApiKey(), getApiSecret());

        request.addParameter(OAuthConstants.CODE, params.getCode());
        final String callback = getCallback();
        if (callback != null) {
            request.addParameter(OAuthConstants.REDIRECT_URI, callback);
        }
        final String scope = params.getScope();
        if (scope != null) {
            request.addParameter(OAuthConstants.SCOPE, scope);
        } else if (defaultScope != null) {
            // fall back to the service-level default scope when none was requested
            request.addParameter(OAuthConstants.SCOPE, defaultScope);
        }
        request.addParameter(OAuthConstants.GRANT_TYPE, OAuthConstants.AUTHORIZATION_CODE);

        final String pkceCodeVerifier = params.getPkceCodeVerifier();
        if (pkceCodeVerifier != null) {
            request.addParameter(PKCE.PKCE_CODE_VERIFIER_PARAM, pkceCodeVerifier);
        }
        return request;
    }

    public Future<OAuth2AccessToken> refreshAccessTokenAsync(String refreshToken) {
        // cast selects the callback overload (not the scope overload)
        return refreshAccessToken(refreshToken, (OAuthAsyncRequestCallback<OAuth2AccessToken>) null);
    }

    public Future<OAuth2AccessToken> refreshAccessTokenAsync(String refreshToken, String scope) {
        return refreshAccessToken(refreshToken, scope, null);
    }

    public OAuth2AccessToken refreshAccessToken(String refreshToken)
            throws IOException, InterruptedException, ExecutionException {
        final OAuthRequest request = createRefreshTokenRequest(refreshToken, null);

        return sendAccessTokenRequestSync(request);
    }

    public OAuth2AccessToken refreshAccessToken(String refreshToken, String scope)
            throws IOException, InterruptedException, ExecutionException {
        final OAuthRequest request = createRefreshTokenRequest(refreshToken, scope);

        return sendAccessTokenRequestSync(request);
    }

    public Future<OAuth2AccessToken> refreshAccessToken(String refreshToken,
            OAuthAsyncRequestCallback<OAuth2AccessToken> callback) {
        final OAuthRequest request = createRefreshTokenRequest(refreshToken, null);

        return sendAccessTokenRequestAsync(request, callback);
    }

    public Future<OAuth2AccessToken> refreshAccessToken(String refreshToken, String scope,
            OAuthAsyncRequestCallback<OAuth2AccessToken> callback) {
        final OAuthRequest request = createRefreshTokenRequest(refreshToken, scope);

        return sendAccessTokenRequestAsync(request, callback);
    }

    /**
     * Builds the refresh-token grant request.
     *
     * @param refreshToken previously issued refresh token; must be non-null and non-empty
     * @param scope        optional scope; falls back to {@code defaultScope} when null
     * @throws IllegalArgumentException when {@code refreshToken} is null or empty
     */
    protected OAuthRequest createRefreshTokenRequest(String refreshToken, String scope) {
        if (refreshToken == null || refreshToken.isEmpty()) {
            throw new IllegalArgumentException("The refreshToken cannot be null or empty");
        }
        final OAuthRequest request = new OAuthRequest(api.getAccessTokenVerb(), api.getRefreshTokenEndpoint());

        api.getClientAuthentication().addClientAuthentication(request, getApiKey(), getApiSecret());

        if (scope != null) {
            request.addParameter(OAuthConstants.SCOPE, scope);
        } else if (defaultScope != null) {
            request.addParameter(OAuthConstants.SCOPE, defaultScope);
        }
        request.addParameter(OAuthConstants.REFRESH_TOKEN, refreshToken);
        request.addParameter(OAuthConstants.GRANT_TYPE, OAuthConstants.REFRESH_TOKEN);
        return request;
    }

    public OAuth2AccessToken getAccessTokenPasswordGrant(String username, String password)
            throws IOException, InterruptedException, ExecutionException {
        final OAuthRequest request = createAccessTokenPasswordGrantRequest(username, password, null);

        return sendAccessTokenRequestSync(request);
    }

    public OAuth2AccessToken getAccessTokenPasswordGrant(String username, String password, String scope)
            throws IOException, InterruptedException, ExecutionException {
        final OAuthRequest request = createAccessTokenPasswordGrantRequest(username, password, scope);

        return sendAccessTokenRequestSync(request);
    }

    public Future<OAuth2AccessToken> getAccessTokenPasswordGrantAsync(String username, String password) {
        // cast selects the callback overload (not the scope overload)
        return getAccessTokenPasswordGrantAsync(username, password,
                (OAuthAsyncRequestCallback<OAuth2AccessToken>) null);
    }

    public Future<OAuth2AccessToken> getAccessTokenPasswordGrantAsync(String username, String password, String scope) {
        return getAccessTokenPasswordGrantAsync(username, password, scope, null);
    }

    /**
     * Request Access Token Password Grant async version
     *
     * @param username User name
     * @param password User password
     * @param callback Optional callback
     * @return Future
     */
    public Future<OAuth2AccessToken> getAccessTokenPasswordGrantAsync(String username, String password,
            OAuthAsyncRequestCallback<OAuth2AccessToken> callback) {
        final OAuthRequest request = createAccessTokenPasswordGrantRequest(username, password, null);

        return sendAccessTokenRequestAsync(request, callback);
    }

    public Future<OAuth2AccessToken> getAccessTokenPasswordGrantAsync(String username, String password, String scope,
            OAuthAsyncRequestCallback<OAuth2AccessToken> callback) {
        final OAuthRequest request = createAccessTokenPasswordGrantRequest(username, password, scope);

        return sendAccessTokenRequestAsync(request, callback);
    }

    /**
     * Builds the resource-owner-password grant request (username/password in the body,
     * scope fallback, grant_type=password, then client authentication).
     */
    protected OAuthRequest createAccessTokenPasswordGrantRequest(String username, String password, String scope) {
        final OAuthRequest request = new OAuthRequest(api.getAccessTokenVerb(), api.getAccessTokenEndpoint());
        request.addParameter(OAuthConstants.USERNAME, username);
        request.addParameter(OAuthConstants.PASSWORD, password);
        if (scope != null) {
            request.addParameter(OAuthConstants.SCOPE, scope);
        } else if (defaultScope != null) {
            request.addParameter(OAuthConstants.SCOPE, defaultScope);
        }
        request.addParameter(OAuthConstants.GRANT_TYPE, OAuthConstants.PASSWORD);

        api.getClientAuthentication().addClientAuthentication(request, getApiKey(), getApiSecret());

        return request;
    }

    public Future<OAuth2AccessToken> getAccessTokenClientCredentialsGrantAsync() {
        // cast selects the callback overload (not the scope overload)
        return getAccessTokenClientCredentialsGrant((OAuthAsyncRequestCallback<OAuth2AccessToken>) null);
    }

    public Future<OAuth2AccessToken> getAccessTokenClientCredentialsGrantAsync(String scope) {
        return getAccessTokenClientCredentialsGrant(scope, null);
    }

    public OAuth2AccessToken getAccessTokenClientCredentialsGrant()
            throws IOException, InterruptedException, ExecutionException {
        final OAuthRequest request = createAccessTokenClientCredentialsGrantRequest(null);

        return sendAccessTokenRequestSync(request);
    }

    public OAuth2AccessToken getAccessTokenClientCredentialsGrant(String scope)
            throws IOException, InterruptedException, ExecutionException {
        final OAuthRequest request = createAccessTokenClientCredentialsGrantRequest(scope);

        return sendAccessTokenRequestSync(request);
    }

    /**
     * Start the request to retrieve the access token using client-credentials grant. The optionally provided callback
     * will be called with the Token when it is available.
     *
     * @param callback optional callback
     * @return Future
     */
    public Future<OAuth2AccessToken> getAccessTokenClientCredentialsGrant(
            OAuthAsyncRequestCallback<OAuth2AccessToken> callback) {
        final OAuthRequest request = createAccessTokenClientCredentialsGrantRequest(null);

        return sendAccessTokenRequestAsync(request, callback);
    }

    public Future<OAuth2AccessToken> getAccessTokenClientCredentialsGrant(String scope,
            OAuthAsyncRequestCallback<OAuth2AccessToken> callback) {
        final OAuthRequest request = createAccessTokenClientCredentialsGrantRequest(scope);

        return sendAccessTokenRequestAsync(request, callback);
    }

    /**
     * Builds the client-credentials grant request (client auth, scope fallback,
     * grant_type=client_credentials).
     */
    protected OAuthRequest createAccessTokenClientCredentialsGrantRequest(String scope) {
        final OAuthRequest request = new OAuthRequest(api.getAccessTokenVerb(), api.getAccessTokenEndpoint());

        api.getClientAuthentication().addClientAuthentication(request, getApiKey(), getApiSecret());

        if (scope != null) {
            request.addParameter(OAuthConstants.SCOPE, scope);
        } else if (defaultScope != null) {
            request.addParameter(OAuthConstants.SCOPE, defaultScope);
        }
        request.addParameter(OAuthConstants.GRANT_TYPE, OAuthConstants.CLIENT_CREDENTIALS);
        return request;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getVersion() {
        return VERSION;
    }

    /** Adds a bearer-token signature (per the API's bearer scheme) to the request. */
    public void signRequest(String accessToken, OAuthRequest request) {
        api.getBearerSignature().signRequest(accessToken, request);
    }

    public void signRequest(OAuth2AccessToken accessToken, OAuthRequest request) {
        // a null token is forwarded as a null raw string
        signRequest(accessToken == null ? null : accessToken.getAccessToken(), request);
    }

    /**
     * Returns the URL where you should redirect your users to authenticate your application.
     *
     * @return the URL where you should redirect your users
     */
    public String getAuthorizationUrl() {
        return createAuthorizationUrlBuilder().build();
    }

    public String getAuthorizationUrl(String state) {
        return createAuthorizationUrlBuilder()
                .state(state)
                .build();
    }

    /**
     * Returns the URL where you should redirect your users to authenticate your application.
     *
     * @param additionalParams any additional GET params to add to the URL
     * @return the URL where you should redirect your users
     */
    public String getAuthorizationUrl(Map<String, String> additionalParams) {
        return createAuthorizationUrlBuilder()
                .additionalParams(additionalParams)
                .build();
    }

    public String getAuthorizationUrl(PKCE pkce) {
        return createAuthorizationUrlBuilder()
                .pkce(pkce)
                .build();
    }

    public AuthorizationUrlBuilder createAuthorizationUrlBuilder() {
        return new AuthorizationUrlBuilder(this);
    }

    public DefaultApi20 getApi() {
        return api;
    }

    /**
     * Builds a token-revocation POST: client auth, "token" and optional "token_type_hint".
     */
    protected OAuthRequest createRevokeTokenRequest(String tokenToRevoke, TokenTypeHint tokenTypeHint) {
        final OAuthRequest request = new OAuthRequest(Verb.POST, api.getRevokeTokenEndpoint());

        api.getClientAuthentication().addClientAuthentication(request, getApiKey(), getApiSecret());

        request.addParameter("token", tokenToRevoke);
        if (tokenTypeHint != null) {
            request.addParameter("token_type_hint", tokenTypeHint.getValue());
        }
        return request;
    }

    public Future<Void> revokeTokenAsync(String tokenToRevoke) {
        return revokeTokenAsync(tokenToRevoke, null);
    }

    public Future<Void> revokeTokenAsync(String tokenToRevoke, TokenTypeHint tokenTypeHint) {
        return revokeToken(tokenToRevoke, null, tokenTypeHint);
    }

    public void revokeToken(String tokenToRevoke) throws IOException, InterruptedException, ExecutionException {
        // cast selects the TokenTypeHint overload
        revokeToken(tokenToRevoke, (TokenTypeHint) null);
    }

    public void revokeToken(String tokenToRevoke, TokenTypeHint tokenTypeHint)
            throws IOException, InterruptedException, ExecutionException {
        final OAuthRequest request = createRevokeTokenRequest(tokenToRevoke, tokenTypeHint);

        checkForErrorRevokeToken(execute(request));
    }

    public Future<Void> revokeToken(String tokenToRevoke, OAuthAsyncRequestCallback<Void> callback) {
        return revokeToken(tokenToRevoke, callback, null);
    }

    public Future<Void> revokeToken(String tokenToRevoke, OAuthAsyncRequestCallback<Void> callback,
            TokenTypeHint tokenTypeHint) {
        final OAuthRequest request = createRevokeTokenRequest(tokenToRevoke, tokenTypeHint);

        return execute(request, callback, new OAuthRequest.ResponseConverter<Void>() {
            @Override
            public Void convert(Response response) throws IOException {
                checkForErrorRevokeToken(response);
                return null;
            }
        });
    }

    // Revocation succeeds only on HTTP 200; any other code is turned into an
    // API-specific exception by the JSON extractor.
    private void checkForErrorRevokeToken(Response response) throws IOException {
        if (response.getCode() != 200) {
            OAuth2AccessTokenJsonExtractor.instance().generateError(response.getBody());
        }
    }

    /**
     * Parses "code" and "state" out of a redirect URL's query string.
     * NOTE(review): values are not URL-decoded here; callers presumably decode if needed — confirm.
     */
    public OAuth2Authorization extractAuthorization(String redirectLocation) {
        final OAuth2Authorization authorization = new OAuth2Authorization();
        int end = redirectLocation.indexOf('#');
        if (end == -1) {
            // no fragment: consume up to the end of the string
            end = redirectLocation.length();
        }
        // iterate over the query-string portion only (between '?' and '#')
        for (String param : redirectLocation.substring(redirectLocation.indexOf('?') + 1, end).split("&")) {
            final String[] keyValue = param.split("=");
            if (keyValue.length == 2) {
                switch (keyValue[0]) {
                    case "code":
                        authorization.setCode(keyValue[1]);
                        break;
                    case "state":
                        authorization.setState(keyValue[1]);
                        break;
                    default: //just ignore any other param;
                }
            }
        }
        return authorization;
    }

    public String getResponseType() {
        return responseType;
    }

    public String getDefaultScope() {
        return defaultScope;
    }
}
package com.lachesis.support.auth.repository;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import com.lachesis.support.auth.annotation.RepositoryTestContext;
import com.lachesis.support.objects.entity.auth.Role;
import com.lachesis.support.objects.entity.auth.User;
import com.lachesis.support.objects.entity.auth.UserRole;

/**
 * Integration tests for {@link UserRepository}: user CRUD plus the
 * user-role association operations (add/delete single, batch and all roles).
 * Runs inside a Spring test context ({@code @RepositoryTestContext}) against
 * a real repository layer; fixtures are built with nanoTime-based unique keys.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@RepositoryTestContext
public class UserRepositoryTest {

    @Autowired
    UserRepository userRepo;
    @Autowired
    RoleRepository roleRepo;

    @Before
    public void setUp() throws Exception {
    }

    // insert then read back by generated id
    @Test
    public void testFindOne() {
        User u = mockUser();
        userRepo.insertOne(u);
        assertThat(u.getId(), notNullValue());
        User u1 = userRepo.findOne(u.getId());
        assertThat(u1, notNullValue());
    }

    // insert then read back by unique username
    @Test
    public void testFindOneByUsername() {
        User u = mockUser();
        userRepo.insertOne(u);
        assertThat(u.getId(), notNullValue());
        User u1 = userRepo.findOneByUsername(u.getUsername());
        assertThat(u1, notNullValue());
    }

    // NOTE(review): @Rollback(false) deliberately commits this row — confirm the
    // test database tolerates leftover data between runs.
    @Test
    @Rollback(false)
    public void testInsertOne() {
        User u = mockUser();
        Long ret = userRepo.insertOne(u);
        assertThat(ret, greaterThan(0L));
        assertThat(u.getId(), notNullValue());
    }

    // partial update: only id + createAt + mobilePhone set on the update entity
    @Test
    public void testUpdateOne() {
        User u = mockUser();
        userRepo.insertOne(u);
        assertThat(u.getId(), notNullValue());

        String mobilePhone = "13789899999";
        User u2 = userRepo.findOne(u.getId());
        assertThat(u2, notNullValue());
        // precondition: stored phone differs from the new value
        assertThat(u2.getMobilePhone(), not(mobilePhone));

        User u1 = new User();
        u1.setId(u.getId());
        u1.setCreateAt(new Date());
        u1.setMobilePhone(mobilePhone);
        int ret = userRepo.updateOne(u1);
        assertThat(ret, greaterThan(0));

        u2 = userRepo.findOne(u.getId());
        assertThat(u2, notNullValue());
        assertThat(u2.getMobilePhone(), equalTo(mobilePhone));
    }

    // single role association is visible via RoleRepository.findByUserId
    @Test
    public void testAddRole() {
        User u = mockUser();
        Role r = mockRole();
        long retUser = userRepo.insertOne(u);
        long retRole = roleRepo.insertOne(r);
        assertThat(retUser, greaterThan(0L));
        assertThat(retRole, greaterThan(0L));
        assertThat(u.getId(), notNullValue());
        assertThat(r.getId(), notNullValue());

        UserRole ur = mockUserRole(u.getId(), r.getId());
        userRepo.addRole(ur);

        List<Role> roles = roleRepo.findByUserId(u.getId());
        assertThat(roles, notNullValue());
        assertThat(roles.size(), greaterThan(0));
        assertThat(roles.get(0).getName(), equalTo(r.getName()));
    }

    // batch association of two roles
    @Test
    public void testAddRoles() {
        User u = mockUser();
        Role r1 = mockRole();
        Role r2 = mockRole();
        userRepo.insertOne(u);
        roleRepo.insertOne(r1);
        roleRepo.insertOne(r2);
        assertThat(u.getId(), notNullValue());
        assertThat(r1.getId(), notNullValue());
        assertThat(r2.getId(), notNullValue());

        List<UserRole> urs = mockUserRoles(u.getId(), r1.getId(), r2.getId());
        userRepo.addRoles(urs);

        List<Role> roles = roleRepo.findByUserId(u.getId());
        assertThat(roles, notNullValue());
        assertThat(roles.size(), equalTo(2));
    }

    // batch removal of an explicit role list leaves no associations
    @Test
    public void testDeleteRoles() {
        User u = mockUser();
        Role r1 = mockRole();
        Role r2 = mockRole();
        userRepo.insertOne(u);
        roleRepo.insertOne(r1);
        roleRepo.insertOne(r2);
        assertThat(u.getId(), notNullValue());
        assertThat(r1.getId(), notNullValue());
        assertThat(r2.getId(), notNullValue());

        List<UserRole> urs = mockUserRoles(u.getId(), r1.getId(), r2.getId());
        userRepo.addRoles(urs);

        List<Role> roles = roleRepo.findByUserId(u.getId());
        assertThat(roles, notNullValue());
        assertThat(roles.size(), equalTo(2));

        List<Role> rolesToDelete = new ArrayList<Role>();
        rolesToDelete.add(r1);
        rolesToDelete.add(r2);
        userRepo.deleteRoles(u.getId(), rolesToDelete);

        roles = roleRepo.findByUserId(u.getId());
        assertThat(roles, notNullValue());
        assertThat(roles.size(), equalTo(0));
    }

    // removing one of two roles leaves exactly one
    @Test
    public void testDeleteRole() {
        User u = mockUser();
        Role r1 = mockRole();
        Role r2 = mockRole();
        userRepo.insertOne(u);
        roleRepo.insertOne(r1);
        roleRepo.insertOne(r2);
        assertThat(u.getId(), notNullValue());
        assertThat(r1.getId(), notNullValue());
        assertThat(r2.getId(), notNullValue());

        List<UserRole> urs = mockUserRoles(u.getId(), r1.getId(), r2.getId());
        userRepo.addRoles(urs);

        List<Role> roles = roleRepo.findByUserId(u.getId());
        assertThat(roles, notNullValue());
        assertThat(roles.size(), equalTo(2));

        userRepo.deleteRole(u.getId(), r1);

        roles = roleRepo.findByUserId(u.getId());
        assertThat(roles, notNullValue());
        assertThat(roles.size(), equalTo(1));
    }

    // deleteAllRoles wipes every association for the user
    @Test
    public void testDeleteAllRoles() {
        User u = mockUser();
        Role r1 = mockRole();
        Role r2 = mockRole();
        userRepo.insertOne(u);
        roleRepo.insertOne(r1);
        roleRepo.insertOne(r2);
        assertThat(u.getId(), notNullValue());
        assertThat(r1.getId(), notNullValue());
        assertThat(r2.getId(), notNullValue());

        List<UserRole> urs = mockUserRoles(u.getId(), r1.getId(), r2.getId());
        userRepo.addRoles(urs);

        List<Role> roles = roleRepo.findByUserId(u.getId());
        assertThat(roles, notNullValue());
        assertThat(roles.size(), equalTo(2));

        userRepo.deleteAllRoles(u.getId());

        roles = roleRepo.findByUserId(u.getId());
        assertThat(roles, notNullValue());
        assertThat(roles.size(), equalTo(0));
    }

    // delete returns affected-row count and the row becomes unreadable
    @Test
    public void testDeleteOne() {
        User u = mockUser();
        Long ret = userRepo.insertOne(u);
        assertThat(ret, greaterThan(0L));
        assertThat(u.getId(), notNullValue());

        int retDel = userRepo.deleteOne(u.getId());
        assertThat(retDel, equalTo(1));

        u = userRepo.findOne(u.getId());
        assertThat(u, nullValue());
    }

    // Fixture: fully-populated user; nanoTime keeps id/username unique per call.
    private User mockUser() {
        User u = new User();
        u.setId(System.nanoTime());
        u.setUsername("user-test-" + System.nanoTime());
        u.setName("USER-TEST");
        u.setCode("code-test");
        u.setTelephone("0755-89896666");
        u.setPassword("123");
        u.setCreateAt(new Date());
        u.setActive(true);
        u.setDeleted(false);
        u.setLocked(false);
        u.setCreateBy(1L);
        u.setEmail("abcddd@lachesis.com");
        u.setDataSource("SYSTEM");
        u.setGender("M");
        u.setMobilePhone("189252222211");
        u.setUpdateAt(new Date());
        u.setUpdateBy(2L);
        return u;
    }

    // Fixture: role with a nanoTime-unique name.
    private Role mockRole() {
        Role r = new Role();
        r.setCode("ROLE_TEST-");
        r.setCreateAt(new Date());
        r.setName("ROLE-TEST-" + System.nanoTime());
        r.setDeleted(false);
        return r;
    }

    // Fixture: one user-role link row.
    private UserRole mockUserRole(long userId, long roleId) {
        UserRole ur = new UserRole();
        ur.setId(System.nanoTime());
        ur.setUserId(userId);
        ur.setRoleId(roleId);
        ur.setDataSource("SYSTEM");
        ur.setCreateBy(userId);
        ur.setCreateAt(new Date());
        ur.setDeleted(false);
        return ur;
    }

    // Fixture: link rows for each role id against the same user.
    private List<UserRole> mockUserRoles(long userId, Long... roleIds) {
        List<UserRole> urs = new ArrayList<UserRole>();
        for (long roleId : roleIds) {
            urs.add(mockUserRole(userId, roleId));
        }
        return urs;
    }
}
/*******************************************************************************
 * Copyright 2011 See AUTHORS file.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package com.badlogic.gdx.controllers.desktop;

import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics.GraphicsType;
import com.badlogic.gdx.controllers.ControlType;
import com.badlogic.gdx.controllers.Controller;
import com.badlogic.gdx.controllers.ControllerListener;
import com.badlogic.gdx.controllers.PovDirection;
import com.badlogic.gdx.controllers.desktop.ois.Ois;
import com.badlogic.gdx.controllers.desktop.ois.OisJoystick;
import com.badlogic.gdx.controllers.desktop.ois.OisJoystick.OisPov;
import com.badlogic.gdx.controllers.desktop.ois.OisListener;
import com.badlogic.gdx.math.Vector3;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.GdxRuntimeException;

import java.awt.Component;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;

import javax.swing.SwingUtilities;

/**
 * Bridges OIS (Object-oriented Input System) joysticks to libGDX
 * {@link Controller}s on desktop. Polls OIS every frame via a self-reposting
 * runnable and transparently re-attaches controllers (and their listeners)
 * when the native window handle changes.
 *
 * @author Nathan Sweet
 */
public class OisControllers {
	final DesktopControllerManager manager;
	// Native window handle OIS is bound to; re-resolved when it goes stale.
	long hwnd = getWindowHandle();
	Ois ois = new Ois(hwnd);
	OisController[] controllers;
	private static final boolean IS_MAC = System.getProperty("os.name").toLowerCase().contains("mac");
	private static final boolean IS_WINDOWS = System.getProperty("os.name").toLowerCase().contains("windows");
	private static final long CHECK_FOR_LOST_WINDOW_HANDLE_INTERVAL = 1000000000L; // in nanoseconds. 1000000000 == 1 second

	public OisControllers (final DesktopControllerManager manager) {
		this.manager = manager;
		// Wrap every OIS joystick in an OisController and register it with the manager.
		ArrayList<OisJoystick> joysticks = ois.getJoysticks();
		controllers = new OisController[joysticks.size()];
		for (int i = 0, n = joysticks.size(); i < n; i++) {
			OisJoystick joystick = joysticks.get(i);
			controllers[i] = new OisController(joystick);
			manager.controllers.add(controllers[i]);
		}

		// Per-frame poll loop: runs once now, then keeps re-posting itself to the
		// libGDX application thread.
		new Runnable() {
			private long lastCheckForLostWindowHandleTime;

			public void run () {
				// we won't do the rather heavy check for a lost window handle each and every frame, but rather each second only
				long now = System.nanoTime();
				if (now - lastCheckForLostWindowHandleTime > CHECK_FOR_LOST_WINDOW_HANDLE_INTERVAL) {
					lastCheckForLostWindowHandleTime = now;

					long newWindowHandle = getWindowHandle();
					if (hwnd != newWindowHandle) {
						// Window handle changed (e.g. canvas re-created): rebuild OIS and
						// the controller array, migrating listeners from the old controllers.
						hwnd = newWindowHandle;
						ois = new Ois(newWindowHandle);
						ArrayList<OisJoystick> joysticks = ois.getJoysticks();
						OisController[] oldControllers = controllers;
						controllers = new OisController[joysticks.size()];
						manager.controllers.clear();
						for (int i = 0, n = joysticks.size(); i < n; i++) {
							OisJoystick joystick = joysticks.get(i);
							controllers[i] = new OisController(joystick);
							if (controllers.length == oldControllers.length
								&& oldControllers[i].getName().equals(controllers[i].getName())) {
								// same device at the same slot: carry its listeners over directly
								if (oldControllers[i].listeners.size != 0) {
									controllers[i].listeners.addAll(oldControllers[i].listeners);
								}
							} else {
								// device order changed: match by name to recover listeners
								for (int j = 0; j < oldControllers.length; j++) {
									if (oldControllers[j].listeners.size != 0
										&& oldControllers[j].getName().equals(controllers[i].getName())) {
										controllers[i].listeners.addAll(oldControllers[j].listeners);
										break;
									}
								}
							}
							manager.controllers.add(controllers[i]);
						}
					}
				}

				ois.update();
				Gdx.app.postRunnable(this);
			}
		}.run();
	}

	/**
	 * A single OIS joystick exposed as a libGDX {@link Controller}. OIS events
	 * are fanned out first to the manager's global listeners, then to the
	 * per-controller listeners.
	 *
	 * @author Nathan Sweet
	 */
	class OisController implements Controller {
		private final OisJoystick joystick;
		final Array<ControllerListener> listeners = new Array();

		public OisController (OisJoystick joystick) {
			this.joystick = joystick;
			joystick.setListener(new OisListener() {
				public void buttonReleased (OisJoystick joystick, int buttonIndex) {
					Array<ControllerListener> allListeners = manager.listeners;
					for (int ii = 0, nn = allListeners.size; ii < nn; ii++)
						allListeners.get(ii).buttonUp(OisController.this, buttonIndex);
					for (int ii = 0, nn = listeners.size; ii < nn; ii++)
						listeners.get(ii).buttonUp(OisController.this, buttonIndex);
				}

				public void buttonPressed (OisJoystick joystick, int buttonIndex) {
					Array<ControllerListener> allListeners = manager.listeners;
					for (int ii = 0, nn = allListeners.size; ii < nn; ii++)
						allListeners.get(ii).buttonDown(OisController.this, buttonIndex);
					for (int ii = 0, nn = listeners.size; ii < nn; ii++)
						listeners.get(ii).buttonDown(OisController.this, buttonIndex);
				}

				public void axisMoved (OisJoystick joystick, int axisIndex, float value) {
					Array<ControllerListener> allListeners = manager.listeners;
					for (int ii = 0, nn = allListeners.size; ii < nn; ii++)
						allListeners.get(ii).axisMoved(OisController.this, axisIndex, value);
					for (int ii = 0, nn = listeners.size; ii < nn; ii++)
						listeners.get(ii).axisMoved(OisController.this, axisIndex, value);
				}

				public void povMoved (OisJoystick joystick, int povIndex, OisPov ignored) {
					// Re-read the POV as a libGDX direction rather than using the raw OIS value.
					PovDirection value = getPov(povIndex);
					Array<ControllerListener> allListeners = manager.listeners;
					for (int ii = 0, nn = allListeners.size; ii < nn; ii++)
						allListeners.get(ii).povMoved(OisController.this, povIndex, value);
					for (int ii = 0, nn = listeners.size; ii < nn; ii++)
						listeners.get(ii).povMoved(OisController.this, povIndex, value);
				}

				public void xSliderMoved (OisJoystick joystick, int sliderIndex, boolean value) {
					Array<ControllerListener> allListeners = manager.listeners;
					for (int ii = 0, nn = allListeners.size; ii < nn; ii++)
						allListeners.get(ii).xSliderMoved(OisController.this, sliderIndex, value);
					for (int ii = 0, nn = listeners.size; ii < nn; ii++)
						listeners.get(ii).xSliderMoved(OisController.this, sliderIndex, value);
				}

				public void ySliderMoved (OisJoystick joystick, int sliderIndex, boolean value) {
					Array<ControllerListener> allListeners = manager.listeners;
					for (int ii = 0, nn = allListeners.size; ii < nn; ii++)
						allListeners.get(ii).ySliderMoved(OisController.this, sliderIndex, value);
					for (int ii = 0, nn = listeners.size; ii < nn; ii++)
						listeners.get(ii).ySliderMoved(OisController.this, sliderIndex, value);
				}
			});
		}

		public boolean getButton (int buttonIndex) {
			return joystick.isButtonPressed(buttonIndex);
		}

		public float getAxis (int axisIndex) {
			return joystick.getAxis(axisIndex);
		}

		// Maps the OIS POV enum onto the libGDX PovDirection enum.
		public PovDirection getPov (int povIndex) {
			OisPov pov = joystick.getPov(povIndex);
			switch (pov) {
			case Centered:
				return PovDirection.center;
			case East:
				return PovDirection.east;
			case North:
				return PovDirection.north;
			case NorthEast:
				return PovDirection.northEast;
			case NorthWest:
				return PovDirection.northWest;
			case South:
				return PovDirection.south;
			case SouthEast:
				return PovDirection.southEast;
			case SouthWest:
				return PovDirection.southWest;
			case West:
				return PovDirection.west;
			}
			return null; // Impossible.
		}

		public boolean getSliderX (int sliderIndex) {
			return joystick.getSliderX(sliderIndex);
		}

		public boolean getSliderY (int sliderIndex) {
			return joystick.getSliderY(sliderIndex);
		}

		// OIS joysticks have no accelerometer; any index is invalid.
		public Vector3 getAccelerometer (int accelerometerIndex) {
			throw new GdxRuntimeException("Invalid accelerometer index: " + accelerometerIndex);
		}

		// No accelerometer, so sensitivity is a no-op.
		public void setAccelerometerSensitivity (float sensitivity) {
		}

		public int getControlCount (ControlType type) {
			switch (type) {
			case button:
				return joystick.getButtonCount();
			case axis:
				return joystick.getAxisCount();
			case slider:
				return joystick.getSliderCount();
			case pov:
				return joystick.getPovCount();
			}
			return 0;
		}

		public void addListener (ControllerListener listener) {
			listeners.add(listener);
		}

		public void removeListener (ControllerListener listener) {
			listeners.removeValue(listener, true);
		}

		public String getName () {
			return joystick.getName();
		}

		public String toString () {
			return joystick.getName();
		}
	}

	/** Returns the window handle from LWJGL needed by OIS. Uses reflection so the
	 * controllers extension has no compile-time dependency on a specific backend. */
	static public long getWindowHandle () {
		// don't need a window handle for Mac OS X
		if (IS_MAC) {
			return 0;
		}

		try {
			if (Gdx.graphics.getType() == GraphicsType.JGLFW)
				return (Long)Gdx.graphics.getClass().getDeclaredMethod("getWindow").invoke(null);

			if (Gdx.graphics.getType() == GraphicsType.LWJGL) {
				// Embedded canvas: walk Swing peer internals to get the native HWND.
				if (Gdx.app.getClass().getName().equals("com.badlogic.gdx.backends.lwjgl.LwjglCanvas")) {
					Class canvasClass = Class.forName("com.badlogic.gdx.backends.lwjgl.LwjglCanvas");
					Object canvas = canvasClass.getDeclaredMethod("getCanvas").invoke(Gdx.app);
					return (Long)invokeMethod(invokeMethod(SwingUtilities.windowForComponent((Component)canvas), "getPeer"),
						"getHWnd");
				}

				// Standalone LWJGL Display: read the private implementation field.
				Class displayClass = Class.forName("org.lwjgl.opengl.Display");
				Method getImplementation = displayClass.getDeclaredMethod("getImplementation", new Class[0]);
				getImplementation.setAccessible(true);
				Object display = getImplementation.invoke(null, (Object[])null);
				Field field = display.getClass().getDeclaredField(IS_WINDOWS ? "hwnd" : "parent_window");
				field.setAccessible(true);
				return (Long)field.get(display);
			}
		} catch (Exception ex) {
			throw new RuntimeException("Unable to get window handle.", ex);
		}
		return 0;
	}

	// Finds and invokes a no-arg method by name on the object's runtime class.
	static private Object invokeMethod (Object object, String methodName) throws Exception {
		for (Method m : object.getClass().getMethods())
			if (m.getName().equals(methodName)) return m.invoke(object);
		throw new RuntimeException("Could not find method '" + methodName + "' on class: " + object.getClass());
	}
}
package net.sf.jabref.logic.net; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.DataOutputStream; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; import java.net.CookieHandler; import java.net.CookieManager; import java.net.CookiePolicy; import java.net.HttpCookie; import java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.net.URLConnection; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import net.sf.jabref.logic.util.io.FileUtil; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; /** * URL download to a string. * <p> * Example: * URLDownload dl = new URLDownload(URL); * String content = dl.downloadToString(ENCODING); * dl.downloadToFile(FILE); // available in FILE * String contentType = dl.determineMimeType(); * * Each call to a public method creates a new HTTP connection. Nothing is cached. 
* * @author Erik Putrycz erik.putrycz-at-nrc-cnrc.gc.ca * @author Simon Harrer */ public class URLDownload { private static final Log LOGGER = LogFactory.getLog(URLDownload.class); private static final String USER_AGENT= "JabRef"; private final URL source; private final Map<String, String> parameters = new HashMap<>(); private String postData = ""; public static URLDownload createURLDownloadWithBrowserUserAgent(String address) throws MalformedURLException { URLDownload downloader = new URLDownload(address); downloader.addParameters("User-Agent", "Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0"); return downloader; } /** * @param address the URL to download from * @throws MalformedURLException if no protocol is specified in the address, or an unknown protocol is found */ public URLDownload(String address) throws MalformedURLException { this(new URL(address)); } /** * @param source The URL to download. */ public URLDownload(URL source) { this.source = source; addParameters("User-Agent", USER_AGENT); } public URL getSource() { return source; } public String determineMimeType() throws IOException { // this does not cause a real performance issue as the underlying HTTP/TCP connection is reused URLConnection urlConnection = openConnection(); try { return urlConnection.getContentType(); } finally { try { urlConnection.getInputStream().close(); } catch (IOException ignored) { // Ignored } } } public void addParameters(String key, String value) { parameters.put(key, value); } public void setPostData(String postData) { if (postData != null) { this.postData = postData; } } private URLConnection openConnection() throws IOException { URLConnection connection = source.openConnection(); for (Map.Entry<String, String> entry : parameters.entrySet()) { connection.setRequestProperty(entry.getKey(), entry.getValue()); } if (!postData.isEmpty()) { connection.setDoOutput(true); try (DataOutputStream wr = new DataOutputStream(connection.getOutputStream())) { 
wr.writeBytes(postData); } } if (connection instanceof HttpURLConnection) { // normally, 3xx is redirect int status = ((HttpURLConnection) connection).getResponseCode(); if (status != HttpURLConnection.HTTP_OK) { if (status == HttpURLConnection.HTTP_MOVED_TEMP || status == HttpURLConnection.HTTP_MOVED_PERM || status == HttpURLConnection.HTTP_SEE_OTHER) { // get redirect url from "location" header field String newUrl = connection.getHeaderField("Location"); // open the new connnection again connection = (HttpURLConnection) new URLDownload(newUrl).openConnection(); } } } // this does network i/o: GET + read returned headers connection.connect(); return connection; } /** * * @return the downloaded string * @throws IOException */ public String downloadToString(Charset encoding) throws IOException { try (InputStream input = new BufferedInputStream(openConnection().getInputStream()); Writer output = new StringWriter()) { copy(input, output, encoding); return output.toString(); } catch (IOException e) { LOGGER.warn("Could not copy input", e); throw e; } } public List<HttpCookie> getCookieFromUrl() throws IOException { CookieManager cookieManager = new CookieManager(); CookieHandler.setDefault(cookieManager); cookieManager.setCookiePolicy(CookiePolicy.ACCEPT_ALL); URLConnection con = openConnection(); con.getHeaderFields(); // must be read to store the cookie try { return cookieManager.getCookieStore().get(source.toURI()); } catch (URISyntaxException e) { LOGGER.error("Unable to convert download URL to URI", e); return Collections.emptyList(); } } private void copy(InputStream in, Writer out, Charset encoding) throws IOException { InputStream monitoredInputStream = monitorInputStream(in); Reader r = new InputStreamReader(monitoredInputStream, encoding); try (BufferedReader read = new BufferedReader(r)) { String line; while ((line = read.readLine()) != null) { out.write(line); out.write("\n"); } } } /** * @deprecated use {@link #downloadToFile(Path)} */ @Deprecated public 
void downloadToFile(File destination) throws IOException { downloadToFile(destination.toPath()); } public void downloadToFile(Path destination) throws IOException { try (InputStream input = monitorInputStream(new BufferedInputStream(openConnection().getInputStream()))) { Files.copy(input, destination, StandardCopyOption.REPLACE_EXISTING); } catch (IOException e) { LOGGER.warn("Could not copy input", e); throw e; } } /** * Downloads the web resource to a temporary file. * * @return the path to the downloaded file. */ public Path downloadToTemporaryFile() throws IOException { // Determine file name and extension from source url String sourcePath = source.getPath(); // Take everything after the last '/' as name + extension String fileNameWithExtension = sourcePath.substring(sourcePath.lastIndexOf('/') + 1); String fileName = FileUtil.getFileName(fileNameWithExtension); String extension = "." + FileUtil.getFileExtension(fileNameWithExtension).orElse("tmp"); // Create temporary file and download to it Path file = Files.createTempFile(fileName, extension); downloadToFile(file); return file; } protected InputStream monitorInputStream(InputStream in) { return in; } @Override public String toString() { return "URLDownload{" + "source=" + source + '}'; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.oncrpc;

import java.nio.ByteBuffer;

import org.apache.commons.io.Charsets;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;

/**
 * Utility class for building XDR messages based on RFC 4506.
 *
 * Key points of the format:
 *
 * <ul>
 * <li>Primitives are stored in big-endian order (i.e., the default byte order
 * of ByteBuffer).</li>
 * <li>Booleans are stored as an integer.</li>
 * <li>Each field in the message is always aligned by 4.</li>
 * </ul>
 *
 */
public final class XDR {
  private static final int DEFAULT_INITIAL_CAPACITY = 256;
  private static final int SIZEOF_INT = 4;
  private static final int SIZEOF_LONG = 8;
  // Scratch source of up to 4 zero bytes used to pad fields to 4-byte alignment.
  private static final byte[] PADDING_BYTES = new byte[] { 0, 0, 0, 0 };

  // The backing buffer; replaced with a larger one by ensureFreeSpace() when writing.
  private ByteBuffer buf;

  // An XDR instance is either a reader or a writer for its whole lifetime;
  // the Preconditions checks below enforce this.
  public enum State {
    READING, WRITING,
  }

  private final State state;

  /**
   * Construct a new XDR message buffer.
   *
   * @param initialCapacity
   *          the initial capacity of the buffer.
   */
  public XDR(int initialCapacity) {
    this(ByteBuffer.allocate(initialCapacity), State.WRITING);
  }

  public XDR() {
    this(DEFAULT_INITIAL_CAPACITY);
  }

  public XDR(ByteBuffer buf, State state) {
    this.buf = buf;
    this.state = state;
  }

  /**
   * Wraps a byte array as a read-only XDR message. There's no copy involved,
   * thus it is the client's responsibility to ensure that the byte array
   * remains unmodified when using the XDR object.
   *
   * @param src
   *          the byte array to be wrapped.
   */
  public XDR(byte[] src) {
    this(ByteBuffer.wrap(src).asReadOnlyBuffer(), State.READING);
  }

  /**
   * Returns a READING view over the bytes written so far. The view shares
   * content with this buffer but has independent position/limit.
   */
  public XDR asReadOnlyWrap() {
    ByteBuffer b = buf.asReadOnlyBuffer();
    if (state == State.WRITING) {
      // Flip so the reader sees [0, position) as its readable range.
      b.flip();
    }
    XDR n = new XDR(b, State.READING);
    return n;
  }

  public ByteBuffer buffer() {
    return buf.duplicate();
  }

  public int size() {
    // TODO: This overloading intends to be compatible with the semantics of
    // the previous version of the class. This function should be separated into
    // two with clear semantics.
    return state == State.READING ? buf.limit() : buf.position();
  }

  public int readInt() {
    Preconditions.checkState(state == State.READING);
    return buf.getInt();
  }

  public void writeInt(int v) {
    ensureFreeSpace(SIZEOF_INT);
    buf.putInt(v);
  }

  public boolean readBoolean() {
    Preconditions.checkState(state == State.READING);
    // RFC 4506: a boolean is a 4-byte integer, non-zero meaning true.
    return buf.getInt() != 0;
  }

  public void writeBoolean(boolean v) {
    ensureFreeSpace(SIZEOF_INT);
    buf.putInt(v ? 1 : 0);
  }

  public long readHyper() {
    Preconditions.checkState(state == State.READING);
    return buf.getLong();
  }

  public void writeLongAsHyper(long v) {
    ensureFreeSpace(SIZEOF_LONG);
    buf.putLong(v);
  }

  /** Reads a fixed-length opaque field and skips its alignment padding. */
  public byte[] readFixedOpaque(int size) {
    Preconditions.checkState(state == State.READING);
    byte[] r = new byte[size];
    buf.get(r);
    alignPosition();
    return r;
  }

  /** Writes the first {@code length} bytes of {@code src}, then pads to a 4-byte boundary. */
  public void writeFixedOpaque(byte[] src, int length) {
    ensureFreeSpace(alignUp(length));
    buf.put(src, 0, length);
    writePadding();
  }

  public void writeFixedOpaque(byte[] src) {
    writeFixedOpaque(src, src.length);
  }

  /** Reads a variable-length opaque field: a 4-byte length followed by the data. */
  public byte[] readVariableOpaque() {
    Preconditions.checkState(state == State.READING);
    int size = readInt();
    return readFixedOpaque(size);
  }

  /** Writes a variable-length opaque field: length prefix, data, then padding. */
  public void writeVariableOpaque(byte[] src) {
    ensureFreeSpace(SIZEOF_INT + alignUp(src.length));
    buf.putInt(src.length);
    writeFixedOpaque(src);
  }

  public String readString() {
    return new String(readVariableOpaque(), Charsets.UTF_8);
  }

  public void writeString(String s) {
    writeVariableOpaque(s.getBytes(Charsets.UTF_8));
  }

  /** Pads the write position forward to the next 4-byte boundary with zero bytes. */
  private void writePadding() {
    Preconditions.checkState(state == State.WRITING);
    int p = pad(buf.position());
    ensureFreeSpace(p);
    buf.put(PADDING_BYTES, 0, p);
  }

  /** Rounds {@code length} up to the next multiple of 4. */
  private int alignUp(int length) {
    return length + pad(length);
  }

  /** Number of padding bytes needed to align {@code length} to 4. */
  private int pad(int length) {
    switch (length % 4) {
    case 1:
      return 3;
    case 2:
      return 2;
    case 3:
      return 1;
    default:
      return 0;
    }
  }

  /** Advances the read position past any alignment padding. */
  private void alignPosition() {
    buf.position(alignUp(buf.position()));
  }

  /**
   * Grows the buffer (by doubling) until at least {@code size} bytes remain.
   * Already-written content is copied into the new buffer.
   */
  private void ensureFreeSpace(int size) {
    Preconditions.checkState(state == State.WRITING);
    if (buf.remaining() < size) {
      int newCapacity = buf.capacity() * 2;
      int newRemaining = buf.capacity() + buf.remaining();
      while (newRemaining < size) {
        newRemaining += newCapacity;
        newCapacity *= 2;
      }
      ByteBuffer newbuf = ByteBuffer.allocate(newCapacity);
      buf.flip();
      newbuf.put(buf);
      buf = newbuf;
    }
  }

  /** check if the rest of data has more than len bytes */
  public static boolean verifyLength(XDR xdr, int len) {
    return xdr.buf.remaining() >= len;
  }

  /**
   * Builds a 4-byte RPC record mark: the fragment size with the high bit set
   * when this is the last fragment (RFC 5531 record marking).
   */
  static byte[] recordMark(int size, boolean last) {
    byte[] b = new byte[SIZEOF_INT];
    ByteBuffer buf = ByteBuffer.wrap(b);
    buf.putInt(!last ? size : size | 0x80000000);
    return b;
  }

  /** Write an XDR message to a TCP ChannelBuffer */
  public static ChannelBuffer writeMessageTcp(XDR request, boolean last) {
    Preconditions.checkState(request.state == XDR.State.WRITING);
    ByteBuffer b = request.buf.duplicate();
    b.flip();
    byte[] fragmentHeader = XDR.recordMark(b.limit(), last);
    ByteBuffer headerBuf = ByteBuffer.wrap(fragmentHeader);

    // TODO: Investigate whether making a copy of the buffer is necessary.
    return ChannelBuffers.copiedBuffer(headerBuf, b);
  }

  /** Write an XDR message to a UDP ChannelBuffer */
  public static ChannelBuffer writeMessageUdp(XDR response) {
    Preconditions.checkState(response.state == XDR.State.READING);
    // TODO: Investigate whether making a copy of the buffer is necessary.
    return ChannelBuffers.copiedBuffer(response.buf);
  }

  /** Extracts the fragment size (low 31 bits) from a record mark. */
  public static int fragmentSize(byte[] mark) {
    ByteBuffer b = ByteBuffer.wrap(mark);
    int n = b.getInt();
    return n & 0x7fffffff;
  }

  /** Returns true if the record mark's high bit flags the last fragment. */
  public static boolean isLastFragment(byte[] mark) {
    ByteBuffer b = ByteBuffer.wrap(mark);
    int n = b.getInt();
    return (n & 0x80000000) != 0;
  }

  @VisibleForTesting
  public byte[] getBytes() {
    ByteBuffer d = asReadOnlyWrap().buffer();
    byte[] b = new byte[d.remaining()];
    d.get(b);
    return b;
  }
}
/**
 * Package: MAG - VistA Imaging
 * WARNING: Per VHA Directive 2004-038, this routine should not be modified.
 * Date Created: Feb 4, 2008
 * Site Name: Washington OI Field Office, Silver Spring, MD
 * @author VHAISWBECKEC
 * @version 1.0
 *
 * ----------------------------------------------------------------
 * Property of the US Government.
 * No permission to copy or redistribute this software is given.
 * Use of unreleased versions of this software requires the user
 * to execute a written test agreement with the VistA Imaging
 * Development Office of the Department of Veterans Affairs,
 * telephone (301) 734-0100.
 *
 * The Food and Drug Administration classifies this software as
 * a Class II medical device. As such, it may not be changed
 * in any way. Modifications to this software may result in an
 * adulterated medical device under 21CFR820, the use of which
 * is considered to be a violation of US Federal Statutes.
 * ----------------------------------------------------------------
 */
package gov.va.med.imaging.exchange.business.taglib.patient;

import gov.va.med.imaging.exchange.business.Patient;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.jsp.JspException;
import org.apache.log4j.Logger;

/**
 * This tag will generate a String that can be used as an href to an image.
 * It needs, by default, only the application path (servlet mapping ) of the WAI servlet.
 * The image specification is determined by the surrounding AbstractPatientTag.
 * The quality and accept type may be set using tag properties, or if left blank
 * will be defaulted.
 * Derived classes of this tag specify the image quality (Thumbnail, Reference, and Diagnostic).
 *
 * @author VHAISWBECKEC
 */
public abstract class AbstractImageHRefTag
extends AbstractPatientPropertyTag
{
	// a derived class may specify this key
	// in the pathInfoPattern
	protected final static String patientIcnParameterKey = "[patientIcn]";

	/** Subclasses supply the fallback pathInfo pattern (used when none is configured). */
	protected abstract String getDefaultPathInfoPattern();

	private String host = null;
	private String context = null;
	private String pathInfoPattern = getDefaultPathInfoPattern();
	private String protocolOverride;
	private String targetSite;
	private Logger logger = Logger.getLogger(this.getClass());

	/**
	 * Set/Get the host portion of the URL. This property is optional
	 * and, if not specified will default to a null, and will produce relative
	 * references.
	 *
	 * @return
	 */
	public String getHost()
	{
		return host;
	}
	public void setHost(String host)
	{
		this.host = host;
	}

	/**
	 * Set/Get the context portion of the URL. This property is optional
	 * and, if not specified will default to a null, and will produce relative
	 * references.
	 *
	 * The value returned from here must be consistent with the getContext()
	 * method of HttpServletRequest.
	 *
	 * @return
	 */
	public String getContext()
	{
		// Normalize: null stays null, "" becomes "/", anything else gets a leading '/'.
		return context == null ? null :
			context.length() == 0 ? "/" :
			context.charAt(0) == '/' ? context : ("/" + context);
	}
	public void setContextBase(String contextBase)
	{
		this.context = contextBase;
	}

	/**
	 * The href is formed by substituting the enclosing image tags identifying information
	 * into the UrlRegex string. The regex should include the following tags where the values
	 * are to be substituted:
	 * <imageUrn>
	 *
	 * If this property is not specified then the following values is used:
	 * "<imageUrn>"
	 *
	 * @return
	 */
	public String getPathInfoPattern()
	{
		// Fall back to the subclass default when unset or blank.
		return pathInfoPattern == null || pathInfoPattern.length() == 0 ?
			getDefaultPathInfoPattern() : pathInfoPattern;
	}
	public void setPathInfoPattern(String urlRegex)
	{
		this.pathInfoPattern = urlRegex;
	}

	/**
	 * @return the protocolOverride
	 */
	public String getProtocolOverride()
	{
		return protocolOverride;
	}

	/**
	 * @param protocolOverride the protocolOverride to set
	 */
	public void setProtocolOverride(String protocolOverride)
	{
		this.protocolOverride = protocolOverride;
	}

	/**
	 * @return the targetSite
	 */
	public String getTargetSite()
	{
		return targetSite;
	}

	/**
	 * @param targetSite the targetSite to set
	 */
	public void setTargetSite(String targetSite)
	{
		this.targetSite = targetSite;
	}

	/**
	 * Assembles the image href from host, context, pathInfo and optional
	 * protocolOverride/targetSite query parameters.
	 *
	 * @see gov.va.med.imaging.exchange.business.taglib.image.AbstractImagePropertyTag#getElementValue()
	 */
	@Override
	protected String getElementValue()
	throws JspException
	{
		String requestContext = null;
		try
		{
			ServletRequest servletRequest = this.pageContext.getRequest();
			HttpServletRequest req = (HttpServletRequest)servletRequest;
			// NOTE(review): requestContext is computed but never used below — TODO confirm intent.
			requestContext = req.getContextPath();
		}
		catch (ClassCastException e1)
		{
			logger.warn("Unable to cast request to HttpServletRequest, tag library expects to be running over HTTP, continuing ...");
		}

		String pathInfo = buildPathInfo();

		// build the path to the image servlet
		StringBuilder sb = new StringBuilder();

		// if the host is specified then append the host name
		if(getHost() != null )
			sb.append(getHost());

		// if the context base is specified then use that value
		// else use this request's context
		if(getContext() != null )
			sb.append(getContext());

		// append a '/' if one is not there already and a context was specified
		if(sb.length() != 0 && '/' != sb.charAt(sb.length()-1))
			sb.append("/");

		// always append the path info
		sb.append(pathInfo);

		// if the protocol override AND the target site are provided then
		// tack them onto the URL as query parameters
		if(getProtocolOverride() != null && getProtocolOverride().length() > 0 &&
			getTargetSite() != null && getTargetSite().length() > 0 )
		{
			sb.append("?");
			sb.append("protocolOverride=");
			sb.append(getProtocolOverride());
			sb.append("&");
			sb.append("targetSite=");
			sb.append(getTargetSite());
		}

		return sb.toString();
	}

	/**
	 * Build the pathInfo portion of the image URL using the pattern
	 * in getPathInfo and the image URN or the patient ICN values from the
	 * ancestor Image element.
	 *
	 * @return
	 * @throws JspException
	 */
	private String buildPathInfo()
	throws JspException
	{
		String pathInfo = getPathInfoPattern();
		Patient patient = this.getPatient();

		// Substitute the patient ICN into the pattern when the placeholder is present.
		if(pathInfo.contains(patientIcnParameterKey))
		{
			String patientIcnExternal = null;
			patientIcnExternal = patient.getPatientIcn();
			pathInfo = pathInfo.replace(patientIcnParameterKey, patientIcnExternal);
		}

		return pathInfo;
	}
}
package net.finmath.montecarlo.interestrate.products; import java.time.LocalDate; import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.function.Function; import java.util.function.IntToDoubleFunction; import java.util.function.ToDoubleFunction; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import net.finmath.exception.CalculationException; import net.finmath.modelling.products.Swaption; import net.finmath.montecarlo.MonteCarloSimulationModel; import net.finmath.montecarlo.RandomVariableFromDoubleArray; import net.finmath.montecarlo.conditionalexpectation.MonteCarloConditionalExpectationLinearRegressionFactory; import net.finmath.montecarlo.conditionalexpectation.MonteCarloConditionalExpectationRegressionFactory; import net.finmath.montecarlo.conditionalexpectation.RegressionBasisFunctionsProvider; import net.finmath.montecarlo.interestrate.LIBORModelMonteCarloSimulationModel; import net.finmath.montecarlo.interestrate.TermStructureMonteCarloSimulationModel; import net.finmath.montecarlo.process.ProcessTimeDiscretizationProvider; import net.finmath.stochastic.ConditionalExpectationEstimator; import net.finmath.stochastic.RandomVariable; import net.finmath.stochastic.Scalar; import net.finmath.time.FloatingpointDate; import net.finmath.time.Period; import net.finmath.time.Schedule; import net.finmath.time.TimeDiscretization; import net.finmath.time.TimeDiscretizationFromArray; /** * Implements the valuation of a Bermudan swaption under a <code>LIBORModelMonteCarloSimulationModel</code> * * @author Christian Fries * @version 1.4 * @date 06.12.2009 * @date 07.04.2019 */ public class BermudanSwaptionFromSwapSchedules extends AbstractLIBORMonteCarloProduct implements RegressionBasisFunctionsProvider, 
ProcessTimeDiscretizationProvider, Swaption { private static Logger logger = Logger.getLogger("net.finmath"); public enum SwaptionType{ PAYER, RECEIVER } private final LocalDateTime referenceDate; private final SwaptionType swaptionType; private final LocalDate[] exerciseDates; private final LocalDate swapEndDate; private final double[] swaprates; private final double[] notionals; private final Schedule[] fixSchedules; private final Schedule[] floatSchedules; private final RegressionBasisFunctionsProvider regressionBasisFunctionProvider; private final MonteCarloConditionalExpectationRegressionFactory conditionalExpectationRegressionFactory; private final boolean isUseAnalyticSwapValuationAtExercise = true; /** * Create a Bermudan swaption from an array of underlying swap schedules (fix leg and float leg), swap rates and notionals. * * This class implements the class backward algorithm using a provided factory for the * determination of the conditional expectation. * * For <code>conditionalExpectationRegressionFactory</code> you may pass * <code>new MonteCarloConditionalExpectationLinearRegressionFactory()</code> (default) or, e.g., <code>new MonteCarloConditionalExpectationLocalizedOnDependentRegressionFactory(2.0)</code>. * * @param referenceDate The date associated with the inception (t=0) of this product. (Not used). * @param swaptionType The type of the underlying swap (PAYER, RECEIVER). * @param exerciseDates A vector of exercise dates. * @param swapEndDate The final maturity of the underlying swap. * @param swaprates A vector of swap rates for the underlying swaps. * @param notionals A vector of notionals for the underlying swaps. * @param fixSchedules A vector of fix leg schedules for the underlying swaps. * @param floatSchedules A vector of float leg schedules for the underlying swaps. 
* @param conditionalExpectationRegressionFactory A object implementing a factory creating a conditional expectation estimator from given regression basis functions * @param regressionBasisFunctionProvider An object implementing RegressionBasisFunctionsProvider to provide the basis functions for the estimation of conditional expectations. */ public BermudanSwaptionFromSwapSchedules(final LocalDateTime referenceDate, final SwaptionType swaptionType, final LocalDate[] exerciseDates, final LocalDate swapEndDate, final double[] swaprates, final double[] notionals, final Schedule[] fixSchedules, final Schedule[] floatSchedules, final MonteCarloConditionalExpectationRegressionFactory conditionalExpectationRegressionFactory, final RegressionBasisFunctionsProvider regressionBasisFunctionProvider) { this.referenceDate = referenceDate; this.swaptionType = swaptionType; this.swapEndDate = swapEndDate; this.swaprates = swaprates; this.notionals = notionals; this.exerciseDates = exerciseDates; this.fixSchedules = fixSchedules; this.floatSchedules = floatSchedules; this.regressionBasisFunctionProvider = regressionBasisFunctionProvider != null ? regressionBasisFunctionProvider : this; this.conditionalExpectationRegressionFactory = conditionalExpectationRegressionFactory; } /** * Create a Bermudan swaption. * * @param referenceDate The date associated with the inception (t=0) of this product. (Not used). * @param swaptionType The type of the underlying swap (PAYER, RECEIVER). * @param exerciseDates A vector of exercise dates. * @param swapEndDate The final maturity of the underlying swap. * @param swaprates A vector of swap rates for the underlying swaps. * @param notionals A vector of notionals for the underlying swaps. * @param fixSchedules A vector of fix leg schedules for the underlying swaps. * @param floatSchedules A vector of float leg schedules for the underlying swaps. 
* @param regressionBasisFunctionProvider An object implementing RegressionBasisFunctionsProvider to provide the basis functions for the estimation of conditional expectations. */ public BermudanSwaptionFromSwapSchedules(final LocalDateTime referenceDate, final SwaptionType swaptionType, final LocalDate[] exerciseDates, final LocalDate swapEndDate, final double[] swaprates, final double[] notionals, final Schedule[] fixSchedules, final Schedule[] floatSchedules, final RegressionBasisFunctionsProvider regressionBasisFunctionProvider) { this(referenceDate, swaptionType, exerciseDates, swapEndDate, swaprates,notionals, fixSchedules, floatSchedules, new MonteCarloConditionalExpectationLinearRegressionFactory(), regressionBasisFunctionProvider); } /** * Create a Bermudan swaption. * * @param referenceDate The date associated with the inception (t=0) of this product. (Not used). * @param swaptionType The type of the underlying swap (PAYER, RECEIVER). * @param exerciseDates A vector of exercise dates. * @param swapEndDate The final maturity of the underlying swap. * @param swaprates A vector of swap rates for the underlying swaps. * @param notionals A vector of notionals for the underlying swaps. * @param fixSchedules A vector of fix leg schedules for the underlying swaps. * @param floatSchedules A vector of float leg schedules for the underlying swaps. */ public BermudanSwaptionFromSwapSchedules(final LocalDateTime referenceDate, final SwaptionType swaptionType, final LocalDate[] exerciseDates, final LocalDate swapEndDate, final double[] swaprates, final double[] notionals, final Schedule[] fixSchedules, final Schedule[] floatSchedules) { this(referenceDate, swaptionType, exerciseDates, swapEndDate, swaprates,notionals, fixSchedules, floatSchedules, null); } /** * Create a Bermudan swaption. * * @param referenceDate The date associated with the inception (t=0) of this product. * @param swaptionType The type of the underlying swap (PAYER, RECEIVER). 
* @param exerciseDates A vector of exercise dates. * @param swapEndDate The final maturity of the underlying swap. * @param swaprate A constant swaprate applying to all underlying swaps. * @param notional A constant notional applying to all underlying swaps. * @param fixSchedules A vector of fix leg schedules for the underlying swaps. * @param floatSchedules A vector of float leg schedules for the underlying swaps. */ public BermudanSwaptionFromSwapSchedules(final LocalDateTime referenceDate, final SwaptionType swaptionType, final LocalDate[] exerciseDates, final LocalDate swapEndDate, final double swaprate, final double notional, final Schedule[] fixSchedules, final Schedule[] floatSchedules) { this(referenceDate, swaptionType, exerciseDates, swapEndDate, IntStream.range(0, exerciseDates.length).mapToDouble(new IntToDoubleFunction() { @Override public double applyAsDouble(final int i) { return swaprate; } }).toArray(), IntStream.range(0, exerciseDates.length).mapToDouble(new IntToDoubleFunction() { @Override public double applyAsDouble(final int i) { return notional; } }).toArray(), fixSchedules, floatSchedules); } /** * Returns the exercise dates. 
 * @return the exercise dates
 */
public LocalDate[] getExerciseDates() {
	return exerciseDates;
}

/**
 * @return the swaptionType
 */
public SwaptionType getSwaptionType() {
	return swaptionType;
}

/**
 * @return the swapEndDate
 */
public LocalDate getSwapEndDate() {
	return swapEndDate;
}

@Override
public Map<String, Object> getValues(final double evaluationTime, final TermStructureMonteCarloSimulationModel model) throws CalculationException {
	final LocalDate modelReferenceDate = model.getReferenceDate().toLocalDate();

	// Backward induction over the exercise dates: values carries the numeraire-relative
	// option value, exerciseTimes the (pathwise) optimal exercise time (+inf = never).
	RandomVariable values = model.getRandomVariableForConstant(0.0);
	RandomVariable exerciseTimes = new Scalar(Double.POSITIVE_INFINITY);
	RandomVariable valuesUnderlying = model.getRandomVariableForConstant(0.0);
	for(int exerciseIndex = exerciseDates.length - 1; exerciseIndex >=0; exerciseIndex--) {
		final double exerciseTime = FloatingpointDate.getFloatingPointDateFromDate(modelReferenceDate, exerciseDates[exerciseIndex]);

		// Numeraire-relative values of the fix and float legs of the swap entered upon exercise.
		final RandomVariable discountedCashflowFixLeg = getValueUnderlyingNumeraireRelative(model, fixSchedules[exerciseIndex], false, swaprates[exerciseIndex], notionals[exerciseIndex]);
		final RandomVariable discountedCashflowFloatingLeg = getValueUnderlyingNumeraireRelative(model, floatSchedules[exerciseIndex], true, 0.0, notionals[exerciseIndex]);

		// Distinguish whether the swaption is of type "Payer" or "Receiver":
		if(swaptionType.equals(SwaptionType.PAYER)) {
			final RandomVariable discountedPayoff = discountedCashflowFloatingLeg.sub(discountedCashflowFixLeg);
			valuesUnderlying = discountedPayoff;//valuesUnderlying.add(discountedPayoff);
		}
		else if(swaptionType.equals(SwaptionType.RECEIVER)){
			final RandomVariable discountedPayoff = discountedCashflowFixLeg.sub(discountedCashflowFloatingLeg);
			valuesUnderlying = discountedPayoff;//valuesUnderlying.add(discountedPayoff);
		}
		// NOTE(review): if swaptionType is neither PAYER nor RECEIVER, valuesUnderlying
		// silently keeps its previous value — confirm the enum has only these two constants.

		// Continuation value minus exercise value; its sign decides exercise.
		final RandomVariable discountedTriggerValues = values.sub(valuesUnderlying);

		// Remove foresight through condition expectation
		final ConditionalExpectationEstimator conditionalExpectationOperator = getConditionalExpectationEstimator(exerciseTime, model);

		// Calculate conditional expectation. Note that no discounting (numeraire division) is required!
		final RandomVariable triggerValues = discountedTriggerValues.getConditionalExpectation(conditionalExpectationOperator);

		// Apply the exercise criteria
		// if triggerValues(omega) <= 0 choose valuesUnderlying (exercise), else keep values (continue)
		values = triggerValues.choose(values, valuesUnderlying);
		exerciseTimes = triggerValues.choose(exerciseTimes, new Scalar(exerciseTime));
	}

	// Logging the exercise probabilities for every exercise time.
	if(logger.isLoggable(Level.FINE)) {
		// NOTE(review): "probabilitie" is a typo in the log message (runtime string left unchanged here).
		logger.fine("Exercise probabilitie " + getExerciseProbabilitiesFromTimes(model.getReferenceDate(), exerciseTimes));
		double probabilityToExercise = 1.0;
		for(int exerciseIndex = 0; exerciseIndex < exerciseDates.length; exerciseIndex++) {
			final double exerciseTime = FloatingpointDate.getFloatingPointDateFromDate(modelReferenceDate, exerciseDates[exerciseIndex]);
			// Fraction of paths still unexercised strictly after exerciseTime (+1 day tolerance).
			final double probabilityToExerciseAfter = exerciseTimes.sub(exerciseTime+1.0/365.0).choose(new Scalar(1.0), new Scalar(0.0)).getAverage();
			final double probability = probabilityToExercise - probabilityToExerciseAfter;
			probabilityToExercise = probabilityToExerciseAfter;
			logger.finer("Exercise " + (exerciseIndex+1) + " on " + exerciseDates[exerciseIndex] + " with probability " + probability);
		}
		logger.finer("No exercise with probability " + probabilityToExercise);
	}

	// Note that values is a relative price - no numeraire division is required
	final RandomVariable numeraireAtZero = model.getNumeraire(evaluationTime);
	final RandomVariable monteCarloProbabilitiesAtZero = model.getMonteCarloWeights(evaluationTime);
	values = values.mult(numeraireAtZero).div(monteCarloProbabilitiesAtZero);

	final Map<String, Object> results = new HashMap<>();
	results.put("values", values);
	results.put("exerciseTimes", exerciseTimes);

	return results;
}

@Override
public RandomVariable getValue(final double evaluationTime, final TermStructureMonteCarloSimulationModel model) throws CalculationException {
	// Convenience accessor delegating to getValues and extracting the "values" entry.
	return (RandomVariable) getValues(evaluationTime, model).get("values");
}

/**
 * Determines the vector of exercise probabilities for a given {@link RandomVariable} of exerciseTimes.
 * The exerciseTimes is a random variable of {@link FloatingpointDate} offsets from a given referenceDate.
 *
 * @param localDateTime A given reference date.
 * @param exerciseTimes A {@link RandomVariable} of exercise times given as {@link FloatingpointDate} offsets from the given referenceDate.
 * @return A vector of exercise probabilities. The length of the vector is <code>exerciseDates.length+1</code>. The last entry is the probability that no exercise occurs.
 */
public double[] getExerciseProbabilitiesFromTimes(final LocalDateTime localDateTime, final RandomVariable exerciseTimes) {
	final double[] exerciseProbabilities = new double[exerciseDates.length+1];
	double probabilityToExercise = 1.0;
	for(int exerciseIndex = 0; exerciseIndex < exerciseDates.length; exerciseIndex++) {
		final double exerciseTime = FloatingpointDate.getFloatingPointDateFromDate(localDateTime, exerciseDates[exerciseIndex].atStartOfDay());
		// Probability mass of exercising at this date = P(still alive) - P(alive after this date).
		final double probabilityToExerciseAfter = exerciseTimes.sub(exerciseTime+1.0/365.0).choose(new Scalar(1.0), new Scalar(0.0)).getAverage();
		exerciseProbabilities[exerciseIndex] = probabilityToExercise - probabilityToExerciseAfter;
		probabilityToExercise = probabilityToExerciseAfter;
	}
	// Residual mass: probability that no exercise occurs at all.
	exerciseProbabilities[exerciseDates.length] = probabilityToExercise;
	return exerciseProbabilities;
}

@Override
public TimeDiscretization getProcessTimeDiscretization(final LocalDateTime referenceDate) {
	// Collect all relevant model times: exercise dates plus every payment date of both legs.
	final Set<Double> times = new HashSet<>();
	for(int exerciseDateIndex = 0; exerciseDateIndex < exerciseDates.length; exerciseDateIndex++) {
		times.add(FloatingpointDate.getFloatingPointDateFromDate(referenceDate, exerciseDates[exerciseDateIndex].atStartOfDay()));

		final Schedule scheduleFixedLeg = fixSchedules[exerciseDateIndex];
		final Schedule scheduleFloatLeg = floatSchedules[exerciseDateIndex];

		// Maps a schedule period to the floating-point time of its payment date.
		final Function<Period, Double> periodToTime = new Function<Period, Double>() {
			@Override
			public Double apply(final Period period) {
				return FloatingpointDate.getFloatingPointDateFromDate(referenceDate, period.getPayment().atStartOfDay());
			}
		};
		times.addAll(scheduleFixedLeg.getPeriods().stream().map(periodToTime).collect(Collectors.toList()));
		times.addAll(scheduleFloatLeg.getPeriods().stream().map(periodToTime).collect(Collectors.toList()));
	}
	return new TimeDiscretizationFromArray(times);
}

/**
 * Calculated the numeraire relative value of an underlying swap leg.
 *
 * @param model The Monte Carlo model.
 * @param legSchedule The schedule of the leg.
 * @param paysFloat If true a floating rate is payed.
 * @param swaprate The swaprate. May be 0.0 for pure floating leg.
 * @param notional The notional.
 * @return The sum of the numeraire relative cash flows.
 * @throws CalculationException Thrown if underlying model failed to calculate stochastic process.
 */
private RandomVariable getValueUnderlyingNumeraireRelative(final TermStructureMonteCarloSimulationModel model, final Schedule legSchedule, final boolean paysFloat, final double swaprate, final double notional) throws CalculationException {
	if(isUseAnalyticSwapValuationAtExercise) {
		// Analytic valuation at the leg's first fixing date, then converted to a numeraire-relative value.
		final double valuationTime = FloatingpointDate.getFloatingPointDateFromDate(model.getReferenceDate().toLocalDate(), legSchedule.getPeriod(0).getFixing());
		// NOTE(review): numeraireAtValuationTime is computed but unused — the numeraire is re-fetched below.
		final RandomVariable numeraireAtValuationTime = model.getNumeraire(valuationTime);
		final RandomVariable monteCarloProbabilitiesAtValuationTime = model.getMonteCarloWeights(valuationTime);

		RandomVariable value = SwaptionFromSwapSchedules.getValueOfLegAnalytic(valuationTime, model, legSchedule, paysFloat, swaprate, notional);
		value = value.div(model.getNumeraire(valuationTime)).mult(monteCarloProbabilitiesAtValuationTime);
		return value;
	}
	else {
		// Full Monte-Carlo valuation: sum numeraire-relative cash flows period by period (backwards).
		RandomVariable value = model.getRandomVariableForConstant(0.0);
		for(int periodIndex = legSchedule.getNumberOfPeriods() - 1; periodIndex >= 0; periodIndex--) {
			final double fixingTime = FloatingpointDate.getFloatingPointDateFromDate(model.getReferenceDate().toLocalDate(), legSchedule.getPeriod(periodIndex).getFixing());
			final double paymentTime = FloatingpointDate.getFloatingPointDateFromDate(model.getReferenceDate().toLocalDate(), legSchedule.getPeriod(periodIndex).getPayment());
			final double periodLength = legSchedule.getPeriodLength(periodIndex);

			final RandomVariable numeraireAtPayment = model.getNumeraire(paymentTime);
			final RandomVariable monteCarloProbabilitiesAtPayment = model.getMonteCarloWeights(paymentTime);
			if(swaprate != 0.0) {
				// Fixed coupon: swaprate * periodLength * notional, numeraire-relative.
				final RandomVariable periodCashFlowFix = model.getRandomVariableForConstant(swaprate * periodLength * notional).div(numeraireAtPayment).mult(monteCarloProbabilitiesAtPayment);
				value = value.add(periodCashFlowFix);
			}
			if(paysFloat) {
				// Floating coupon: forward rate fixed at fixingTime for [fixingTime, paymentTime].
				final RandomVariable libor = model.getForwardRate(fixingTime, fixingTime, paymentTime);
				final RandomVariable periodCashFlowFloat = libor.mult(periodLength).mult(notional).div(numeraireAtPayment).mult(monteCarloProbabilitiesAtPayment);
				value = value.add(periodCashFlowFloat);
			}
		}
		return value;
	}
}

/**
 * The conditional expectation is calculated using a Monte-Carlo regression technique.
 *
 * @param exerciseTime The exercise time
 * @param model The valuation model
 * @return The conditional expectation estimator
 * @throws CalculationException Thrown if underlying model failed to calculate stochastic process.
 */
public ConditionalExpectationEstimator getConditionalExpectationEstimator(final double exerciseTime, final TermStructureMonteCarloSimulationModel model) throws CalculationException {
	final RandomVariable[] regressionBasisFunctions = regressionBasisFunctionProvider.getBasisFunctions(exerciseTime, model);
	return conditionalExpectationRegressionFactory.getConditionalExpectationEstimator(regressionBasisFunctions, regressionBasisFunctions);
}

@Override
public RandomVariable[] getBasisFunctions(final double evaluationTime, final MonteCarloSimulationModel model) throws CalculationException {
	// NOTE(review): unchecked downcast — fails with ClassCastException for non-LIBOR models.
	final LIBORModelMonteCarloSimulationModel liborModel = (LIBORModelMonteCarloSimulationModel)model;
	return getBasisFunctions(evaluationTime, liborModel);
}

/**
 * Provides a set of \( \mathcal{F}_{t} \)-measurable random variables which can serve as regression basis functions.
 *
 * @param evaluationTime The evaluation time \( t \) at which the basis function should be observed.
 * @param model The Monte-Carlo model used to derive the basis function.
 * @return An \( \mathcal{F}_{t} \)-measurable random variable.
 * @throws CalculationException Thrown if derivation of the basis function fails.
 */
public RandomVariable[] getBasisFunctions(final double evaluationTime, final LIBORModelMonteCarloSimulationModel model) throws CalculationException {
	final LocalDateTime modelReferenceDate = model.getReferenceDate();

	// Sorted floating-point times of all exercise dates plus the final swap end date.
	final double[] regressionBasisfunctionTimes = Stream.concat(Arrays.stream(exerciseDates),Stream.of(swapEndDate)).mapToDouble(new ToDoubleFunction<LocalDate>() {
		@Override
		public double applyAsDouble(final LocalDate date) {
			return FloatingpointDate.getFloatingPointDateFromDate(modelReferenceDate, date.atStartOfDay());
		}
	}).sorted().toArray();

	final ArrayList<RandomVariable> basisFunctions = new ArrayList<>();

	final double exerciseTime = evaluationTime;

	int exerciseIndex = Arrays.binarySearch(regressionBasisfunctionTimes, exerciseTime);
	if(exerciseIndex < 0) {
		// NOTE(review): Arrays.binarySearch returns -(insertionPoint)-1 on a miss, so this
		// yields insertionPoint+1, not insertionPoint — confirm the off-by-one is intentional.
		exerciseIndex = -exerciseIndex;
	}
	if(exerciseIndex >= exerciseDates.length) {
		exerciseIndex = exerciseDates.length-1;
	}

	// Constant
	final RandomVariable one = new RandomVariableFromDoubleArray(1.0);
	basisFunctions.add(one);

	// Numeraire (adapted to multicurve framework)
	final RandomVariable discountFactor = model.getNumeraire(exerciseTime).invert();
	basisFunctions.add(discountFactor);

	/*
	 * Add swap rates of underlyings.
	 */
	for(int exerciseIndexUnderlying = exerciseIndex; exerciseIndexUnderlying<exerciseDates.length; exerciseIndexUnderlying++) {
		final RandomVariable floatLeg = SwaptionFromSwapSchedules.getValueOfLegAnalytic(exerciseTime, model, floatSchedules[exerciseIndexUnderlying], true, 0.0, 1.0);
		final RandomVariable annuity = SwaptionFromSwapSchedules.getValueOfLegAnalytic(exerciseTime, model, fixSchedules[exerciseIndexUnderlying], false, 1.0, 1.0);
		final RandomVariable swapRate = floatLeg.div(annuity);
		final RandomVariable basisFunction = swapRate.mult(discountFactor);
		basisFunctions.add(basisFunction);
		basisFunctions.add(basisFunction.squared());
	}

	// forward rate to the next period
	final RandomVariable rateShort = model.getForwardRate(exerciseTime, exerciseTime, regressionBasisfunctionTimes[exerciseIndex + 1]);
	basisFunctions.add(rateShort.mult(discountFactor));
	basisFunctions.add(rateShort.mult(discountFactor).pow(2.0));

	return basisFunctions.toArray(new RandomVariable[basisFunctions.size()]);
}

/*
 * Some popular variants to create regression basis functions
 */

/**
 * Basis functions: constant, swap rates of the remaining underlyings, short rate (and square), numeraire.
 *
 * @return A {@link RegressionBasisFunctionsProvider} using swap rates as regressors.
 */
public RegressionBasisFunctionsProvider getBasisFunctionsProviderWithSwapRates() {
	return new RegressionBasisFunctionsProvider() {
		@Override
		public RandomVariable[] getBasisFunctions(final double evaluationTime, final MonteCarloSimulationModel monteCarloModel) throws CalculationException {
			final LIBORModelMonteCarloSimulationModel model = (LIBORModelMonteCarloSimulationModel)monteCarloModel;
			final LocalDateTime modelReferenceDate = model.getReferenceDate();

			final double[] regressionBasisfunctionTimes = Stream.concat(Arrays.stream(exerciseDates),Stream.of(swapEndDate)).mapToDouble(new ToDoubleFunction<LocalDate>() {
				@Override
				public double applyAsDouble(final LocalDate date) {
					return FloatingpointDate.getFloatingPointDateFromDate(modelReferenceDate, date.atStartOfDay());
				}
			}).sorted().toArray();

			final ArrayList<RandomVariable> basisFunctions = new ArrayList<>();

			final double exerciseTime = evaluationTime;

			int exerciseIndex = Arrays.binarySearch(regressionBasisfunctionTimes, exerciseTime);
			if(exerciseIndex < 0) {
				// NOTE(review): same binarySearch off-by-one as in getBasisFunctions — confirm intentional.
				exerciseIndex = -exerciseIndex;
			}
			if(exerciseIndex >= exerciseDates.length) {
				exerciseIndex = exerciseDates.length-1;
			}

			// Constant
			final RandomVariable one = new RandomVariableFromDoubleArray(1.0);
			final RandomVariable basisFunction = one;
			basisFunctions.add(basisFunction);

			/*
			 * Add swap rates of underlyings.
			 */
			for(int exerciseIndexUnderlying = exerciseIndex; exerciseIndexUnderlying<exerciseDates.length; exerciseIndexUnderlying++) {
				final RandomVariable floatLeg = SwaptionFromSwapSchedules.getValueOfLegAnalytic(exerciseTime, model, floatSchedules[exerciseIndexUnderlying], true, 0.0, 1.0);
				final RandomVariable annuity = SwaptionFromSwapSchedules.getValueOfLegAnalytic(exerciseTime, model, fixSchedules[exerciseIndexUnderlying], false, 1.0, 1.0);
				final RandomVariable swapRate = floatLeg.div(annuity);
				basisFunctions.add(swapRate);
			}

			// forward rate to the next period
			final RandomVariable rateShort = model.getForwardRate(exerciseTime, exerciseTime, regressionBasisfunctionTimes[exerciseIndex + 1]);
			basisFunctions.add(rateShort);
			basisFunctions.add(rateShort.pow(2.0));

			// Numeraire (adapted to multicurve framework)
			final RandomVariable discountFactor = model.getNumeraire(exerciseTime).invert();
			basisFunctions.add(discountFactor);

			return basisFunctions.toArray(new RandomVariable[basisFunctions.size()]);
		}
	};
}

/**
 * Basis functions: constant, short and long forward rates (and squares), numeraire, and a cross term.
 *
 * @return A {@link RegressionBasisFunctionsProvider} using forward rates as regressors.
 */
public RegressionBasisFunctionsProvider getBasisFunctionsProviderWithForwardRates() {
	return new RegressionBasisFunctionsProvider() {
		@Override
		public RandomVariable[] getBasisFunctions(final double evaluationTime, final MonteCarloSimulationModel monteCarloModel) throws CalculationException {
			final LIBORModelMonteCarloSimulationModel model = (LIBORModelMonteCarloSimulationModel)monteCarloModel;
			final LocalDateTime modelReferenceDate = model.getReferenceDate();

			final double[] regressionBasisfunctionTimes = Stream.concat(Arrays.stream(exerciseDates),Stream.of(swapEndDate)).mapToDouble(new ToDoubleFunction<LocalDate>() {
				@Override
				public double applyAsDouble(final LocalDate date) {
					return FloatingpointDate.getFloatingPointDateFromDate(modelReferenceDate, date.atStartOfDay());
				}
			}).sorted().toArray();

			final ArrayList<RandomVariable> basisFunctions = new ArrayList<>();

			// NOTE(review): 'referenceDate' here is not the local 'modelReferenceDate' — presumably
			// an enclosing-class field; verify it matches the model's reference date.
			final double swapMaturity = FloatingpointDate.getFloatingPointDateFromDate(referenceDate, swapEndDate.atStartOfDay());

			final double exerciseTime = evaluationTime;

			int exerciseIndex = Arrays.binarySearch(regressionBasisfunctionTimes, exerciseTime);
			if(exerciseIndex < 0) {
				exerciseIndex = -exerciseIndex;
			}
			if(exerciseIndex >= exerciseDates.length) {
				exerciseIndex = exerciseDates.length-1;
			}

			// Constant
			final RandomVariable one = new RandomVariableFromDoubleArray(1.0);
			final RandomVariable basisFunction = one;
			basisFunctions.add(basisFunction);

			// forward rate to the next period
			final RandomVariable rateShort = model.getForwardRate(exerciseTime, exerciseTime, regressionBasisfunctionTimes[exerciseIndex + 1]);
			basisFunctions.add(rateShort);
			basisFunctions.add(rateShort.pow(2.0));

			// forward rate to the end of the product
			final RandomVariable rateLong = model.getForwardRate(exerciseTime, regressionBasisfunctionTimes[exerciseIndex], swapMaturity);
			basisFunctions.add(rateLong);
			basisFunctions.add(rateLong.pow(2.0));

			// Numeraire (adapted to multicurve framework)
			final RandomVariable discountFactor = model.getNumeraire(exerciseTime).invert();
			basisFunctions.add(discountFactor);

			// Cross
			basisFunctions.add(rateLong.mult(discountFactor));

			return basisFunctions.toArray(new RandomVariable[basisFunctions.size()]);
		}
	};
}

@Override
public String toString() {
	return "BermudanSwaptionFromSwapSchedules[type: " + swaptionType.toString() + ", "
			+"exerciseDate: " + Arrays.toString(exerciseDates) +", "
			+ "swapEndDate: " + swapEndDate + ", "
			+ "strike: " + Arrays.toString(swaprates) + ", "
			+ "floatingTenor: " + Arrays.toString(floatSchedules) + ", "
			+ "fixTenor: " + Arrays.toString(fixSchedules) + "]";
}
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.importexport.model;

import java.io.Serializable;
import java.util.Objects;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Input structure for the UpdateJob operation.
 */
public class UpdateJobRequest extends AmazonWebServiceRequest implements
        Serializable, Cloneable {

    /** Identifier of the job to update. */
    private String jobId;
    /** Manifest content (YAML) describing the job. */
    private String manifest;
    /** Job type; see {@link JobType} for valid values. */
    private String jobType;
    /** If true, only validate the request without updating the job. */
    private Boolean validateOnly;
    /** API version used for this request. */
    private String aPIVersion;

    /**
     * Sets the identifier of the job to update.
     *
     * @param jobId the job identifier
     */
    public void setJobId(String jobId) {
        this.jobId = jobId;
    }

    /**
     * Returns the identifier of the job to update.
     *
     * @return the job identifier
     */
    public String getJobId() {
        return this.jobId;
    }

    /**
     * Sets the job identifier and returns this request for call chaining.
     *
     * @param jobId the job identifier
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public UpdateJobRequest withJobId(String jobId) {
        setJobId(jobId);
        return this;
    }

    /**
     * Sets the manifest content.
     *
     * @param manifest the manifest content
     */
    public void setManifest(String manifest) {
        this.manifest = manifest;
    }

    /**
     * Returns the manifest content.
     *
     * @return the manifest content
     */
    public String getManifest() {
        return this.manifest;
    }

    /**
     * Sets the manifest content and returns this request for call chaining.
     *
     * @param manifest the manifest content
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public UpdateJobRequest withManifest(String manifest) {
        setManifest(manifest);
        return this;
    }

    /**
     * Sets the job type from its string representation.
     *
     * @param jobType the job type
     * @see JobType
     */
    public void setJobType(String jobType) {
        this.jobType = jobType;
    }

    /**
     * Returns the job type as a string.
     *
     * @return the job type
     * @see JobType
     */
    public String getJobType() {
        return this.jobType;
    }

    /**
     * Sets the job type and returns this request for call chaining.
     *
     * @param jobType the job type
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see JobType
     */
    public UpdateJobRequest withJobType(String jobType) {
        setJobType(jobType);
        return this;
    }

    /**
     * Sets the job type from the {@link JobType} enum.
     * (Fixed: the generated javadoc wrongly documented a return value on this void method.)
     *
     * @param jobType the job type
     * @see JobType
     */
    public void setJobType(JobType jobType) {
        this.jobType = jobType.toString();
    }

    /**
     * Sets the job type from the enum and returns this request for call chaining.
     *
     * @param jobType the job type
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     * @see JobType
     */
    public UpdateJobRequest withJobType(JobType jobType) {
        setJobType(jobType);
        return this;
    }

    /**
     * Sets whether the request should only be validated.
     *
     * @param validateOnly true to validate without updating
     */
    public void setValidateOnly(Boolean validateOnly) {
        this.validateOnly = validateOnly;
    }

    /**
     * Returns whether the request should only be validated.
     *
     * @return true if the request is validate-only
     */
    public Boolean getValidateOnly() {
        return this.validateOnly;
    }

    /**
     * Sets the validate-only flag and returns this request for call chaining.
     *
     * @param validateOnly true to validate without updating
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public UpdateJobRequest withValidateOnly(Boolean validateOnly) {
        setValidateOnly(validateOnly);
        return this;
    }

    /**
     * Returns whether the request should only be validated.
     *
     * @return true if the request is validate-only
     */
    public Boolean isValidateOnly() {
        return this.validateOnly;
    }

    /**
     * Sets the API version.
     *
     * @param aPIVersion the API version
     */
    public void setAPIVersion(String aPIVersion) {
        this.aPIVersion = aPIVersion;
    }

    /**
     * Returns the API version.
     *
     * @return the API version
     */
    public String getAPIVersion() {
        return this.aPIVersion;
    }

    /**
     * Sets the API version and returns this request for call chaining.
     *
     * @param aPIVersion the API version
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public UpdateJobRequest withAPIVersion(String aPIVersion) {
        setAPIVersion(aPIVersion);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getJobId() != null)
            sb.append("JobId: " + getJobId() + ",");
        if (getManifest() != null)
            sb.append("Manifest: " + getManifest() + ",");
        if (getJobType() != null)
            sb.append("JobType: " + getJobType() + ",");
        if (getValidateOnly() != null)
            sb.append("ValidateOnly: " + getValidateOnly() + ",");
        if (getAPIVersion() != null)
            sb.append("APIVersion: " + getAPIVersion());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof UpdateJobRequest))
            return false;
        UpdateJobRequest other = (UpdateJobRequest) obj;
        // Objects.equals is behaviorally identical to the original
        // null-XOR/equals pattern, field by field.
        return Objects.equals(getJobId(), other.getJobId())
                && Objects.equals(getManifest(), other.getManifest())
                && Objects.equals(getJobType(), other.getJobType())
                && Objects.equals(getValidateOnly(), other.getValidateOnly())
                && Objects.equals(getAPIVersion(), other.getAPIVersion());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (null -> 0, seed 1)
        // as the original hand-rolled implementation, so values are unchanged.
        return Objects.hash(getJobId(), getManifest(), getJobType(),
                getValidateOnly(), getAPIVersion());
    }

    @Override
    public UpdateJobRequest clone() {
        return (UpdateJobRequest) super.clone();
    }
}
/* * Copyright (c) 2008-2011, Piccolo2D project, http://piccolo2d.org * Copyright (c) 1998-2008, University of Maryland * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided * that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions * and the following disclaimer in the documentation and/or other materials provided with the * distribution. * * None of the name of the University of Maryland, the name of the Piccolo2D project, or the names of its * contributors may be used to endorse or promote products derived from this software without specific * prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */
package org.piccolo2d.extras.swt;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Paint;
import java.awt.Shape;
import java.awt.geom.AffineTransform;
import java.awt.geom.Arc2D;
import java.awt.geom.Ellipse2D;
import java.awt.geom.GeneralPath;
import java.awt.geom.Line2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.geom.RoundRectangle2D;

import org.piccolo2d.PNode;
import org.piccolo2d.nodes.PPath;
import org.piccolo2d.util.PAffineTransform;
import org.piccolo2d.util.PAffineTransformException;
import org.piccolo2d.util.PBounds;
import org.piccolo2d.util.PPaintContext;

/**
 * <b>PSWTPath</b> is a wrapper around a java.awt.geom.GeneralPath, with
 * workarounds for drawing shapes in SWT where necessary.
 *
 * @version 1.0
 * @author Jesse Grosjean
 */
public class PSWTPath extends PNode {

    private static final long serialVersionUID = 1L;

    /**
     * The property name that identifies a change of this node's path. In any
     * property change event the new value will be a reference to this node's
     * path, but old value will always be null.
     */
    public static final String PROPERTY_SHAPE = "shape";

    private static final String PROPERTY_PATH = "path";
    private static final int PROPERTY_CODE_PATH = 1 << 18;
    private static final String PROPERTY_STROKE_PAINT = "strokePaint";
    private static final int PROPERTY_CODE_STROKE_PAINT = 1 << 16;
    // Relative tolerance used by internalUpdateBounds to skip no-op rescaling.
    private static final double BOUNDS_TOLERANCE = 0.01;
    // Shared scratch shapes reused by the static factory methods (not thread-safe).
    private static final Rectangle2D.Float TEMP_RECTANGLE = new Rectangle2D.Float();
    private static final RoundRectangle2D.Float TEMP_ROUNDRECTANGLE = new RoundRectangle2D.Float();
    private static final Ellipse2D.Float TEMP_ELLIPSE = new Ellipse2D.Float();
    private static final Color DEFAULT_STROKE_PAINT = Color.black;
    private static final BasicStroke BASIC_STROKE = new BasicStroke();
    // Assumed pen width used to pad the node bounds; SWT pens do not scale.
    private static final float PEN_WIDTH = 1f;
    private static final float DEFAULT_TRANSPARENCY = 1.0f;

    // Paint used for the shape outline (null = no outline drawn).
    private Paint strokePaint;
    // Re-entrancy guard so internalUpdateBounds ignores bounds changes made by updateBoundsFromPath.
    private boolean updatingBoundsFromPath;
    // The shape as originally supplied (cloned); basis for bounds computations.
    private Shape origShape;
    // The shape actually drawn; same object as origShape after setShape.
    private Shape shape;
    // Transform applied before painting when bounds were set explicitly (see internalUpdateBounds).
    private PAffineTransform internalXForm;
    // Inverse of internalXForm, applied after painting to restore the graphics state.
    private AffineTransform inverseXForm;
    // Cached shape parameters (x, y, w, h [, arc/angle]) used by drawShape/fillShape.
    private double[] shapePts;

    private float transparency = DEFAULT_TRANSPARENCY;

    /**
     * Creates a path representing the rectangle provided.
     *
     * @param x left of rectangle
     * @param y top of rectangle
     * @param width width of rectangle
     * @param height height of rectangle
     * @return created rectangle
     */
    public static PSWTPath createRectangle(final float x, final float y, final float width, final float height) {
        TEMP_RECTANGLE.setFrame(x, y, width, height);
        final PSWTPath result = new PSWTPath(TEMP_RECTANGLE);
        result.setPaint(Color.white);
        return result;
    }

    /**
     * Creates a path representing the rounded rectangle provided.
     *
     * @param x left of rectangle
     * @param y top of rectangle
     * @param width width of rectangle
     * @param height height of rectangle
     * @param arcWidth width of the arc at the corners
     * @param arcHeight height of arc at the corners
     * @return created rounded rectangle
     */
    public static PSWTPath createRoundRectangle(final float x, final float y, final float width, final float height,
            final float arcWidth, final float arcHeight) {
        TEMP_ROUNDRECTANGLE.setRoundRect(x, y, width, height, arcWidth, arcHeight);
        final PSWTPath result = new PSWTPath(TEMP_ROUNDRECTANGLE);
        result.setPaint(Color.white);
        return result;
    }

    /**
     * Creates a path representing an ellipse that covers the rectangle
     * provided.
     *
     * @param x left of rectangle
     * @param y top of rectangle
     * @param width width of rectangle
     * @param height height of rectangle
     * @return created ellipse
     */
    public static PSWTPath createEllipse(final float x, final float y, final float width, final float height) {
        TEMP_ELLIPSE.setFrame(x, y, width, height);
        final PSWTPath result = new PSWTPath(TEMP_ELLIPSE);
        result.setPaint(Color.white);
        return result;
    }

    /**
     * Creates a PPath for the poly-line for the given points.
     *
     * @param points array of points for the point lines
     *
     * @return created poly-line for the given points
     */
    public static PSWTPath createPolyline(final Point2D[] points) {
        final PSWTPath result = new PSWTPath();
        result.setPathToPolyline(points);
        result.setPaint(Color.white);
        return result;
    }

    /**
     * Creates a PPath for the poly-line for the given points.
     *
     * @param xp array of x components of the points of the poly-lines
     * @param yp array of y components of the points of the poly-lines
     *
     * @return created poly-line for the given points
     */
    public static PSWTPath createPolyline(final float[] xp, final float[] yp) {
        final PSWTPath result = new PSWTPath();
        result.setPathToPolyline(xp, yp);
        result.setPaint(Color.white);
        return result;
    }

    /**
     * Creates an empty PSWTPath with the default (black) stroke paint.
     */
    public PSWTPath() {
        strokePaint = DEFAULT_STROKE_PAINT;
    }

    /**
     * Creates an SWTPath in the given shape with the default paint and stroke.
     *
     * @param aShape the desired shape
     */
    public PSWTPath(final Shape aShape) {
        this();
        setShape(aShape);
    }

    // ****************************************************************
    // Stroke
    // ****************************************************************

    /**
     * Returns the paint to use when drawing the stroke of the shape.
     *
     * @return path's stroke paint
     */
    public Paint getStrokePaint() {
        return strokePaint;
    }

    /**
     * Sets the paint to use when drawing the stroke of the shape.
     *
     * @param strokeColor new stroke color
     */
    public void setStrokeColor(final Paint strokeColor) {
        final Paint old = strokePaint;
        strokePaint = strokeColor;
        invalidatePaint();
        firePropertyChange(PROPERTY_CODE_STROKE_PAINT, PROPERTY_STROKE_PAINT, old, strokePaint);
    }

    /**
     * Set the bounds of this path. This method works by scaling the path to fit
     * into the specified bounds. This normally works well, but if the specified
     * base bounds get too small then it is impossible to expand the path shape
     * again since all its numbers have tended to zero, so application code may
     * need to take this into consideration.
     *
     * @param x new left position of bounds
     * @param y new top position of bounds
     * @param width the new width of the bounds
     * @param height the new height of the bounds
     */
    protected void internalUpdateBounds(final double x, final double y, final double width, final double height) {
        // Ignore bounds changes triggered by updateBoundsFromPath itself.
        if (updatingBoundsFromPath) {
            return;
        }
        if (origShape == null) {
            return;
        }

        final Rectangle2D pathBounds = origShape.getBounds2D();

        // Skip the rescale when the requested bounds already match the path bounds
        // within tolerance.
        // NOTE(review): the relative comparisons divide by x, y, width, height — a zero
        // value yields NaN (comparison false, so the rescale proceeds); confirm intended.
        if (Math.abs(x - pathBounds.getX()) / x < BOUNDS_TOLERANCE
                && Math.abs(y - pathBounds.getY()) / y < BOUNDS_TOLERANCE
                && Math.abs(width - pathBounds.getWidth()) / width < BOUNDS_TOLERANCE
                && Math.abs(height - pathBounds.getHeight()) / height < BOUNDS_TOLERANCE) {
            return;
        }

        // Build the transform mapping the path bounds onto the requested bounds.
        if (internalXForm == null) {
            internalXForm = new PAffineTransform();
        }
        internalXForm.setToIdentity();
        internalXForm.translate(x, y);
        internalXForm.scale(width / pathBounds.getWidth(), height / pathBounds.getHeight());
        internalXForm.translate(-pathBounds.getX(), -pathBounds.getY());

        try {
            inverseXForm = internalXForm.createInverse();
        }
        catch (final Exception e) {
            throw new PAffineTransformException("unable to invert transform", internalXForm);
        }
    }

    /**
     * Returns true if path crosses the provided bounds. Takes visibility of
     * path into account.
     *
     * @param aBounds bounds being tested for intersection
     * @return true if path visibly crosses bounds
     */
    public boolean intersects(final Rectangle2D aBounds) {
        if (super.intersects(aBounds)) {
            // Map the query bounds into the path's own coordinate system when an
            // internal transform is active.
            final Rectangle2D srcBounds;
            if (internalXForm == null) {
                srcBounds = aBounds;
            }
            else {
                srcBounds = new PBounds(aBounds);
                internalXForm.inverseTransform(srcBounds, srcBounds);
            }

            // Filled shape: test the interior; stroked-only: test the stroked outline.
            if (getPaint() != null && shape.intersects(srcBounds)) {
                return true;
            }
            else if (strokePaint != null) {
                return BASIC_STROKE.createStrokedShape(shape).intersects(srcBounds);
            }
        }
        return false;
    }

    /**
     * Recalculates the path's bounds by examining its associated shape.
     */
    public void updateBoundsFromPath() {
        updatingBoundsFromPath = true;
        if (origShape == null) {
            resetBounds();
        }
        else {
            final Rectangle2D b = origShape.getBounds2D();

            // Note that this pen width code does not really work for SWT since
            // it assumes that the pen width scales - in actuality it does not.
            // However, the fix would be to have volatile bounds for all shapes
            // which isn't a nice alternative
            super.setBounds(b.getX() - PEN_WIDTH, b.getY() - PEN_WIDTH, b.getWidth() + 2 * PEN_WIDTH, b.getHeight()
                    + 2 * PEN_WIDTH);
        }
        updatingBoundsFromPath = false;
    }

    // ****************************************************************
    // Painting
    // ****************************************************************

    /**
     * Paints the path on the context provided.
     *
     * @param paintContext the context onto which the path will be painted
     */
    protected void paint(final PPaintContext paintContext) {
        final Paint p = getPaint();
        final SWTGraphics2D g2 = (SWTGraphics2D) paintContext.getGraphics();
        g2.setTransparency(transparency);

        // Apply the bounds-fitting transform, paint, then undo it via the inverse.
        if (internalXForm != null) {
            g2.transform(internalXForm);
        }
        if (p != null) {
            g2.setBackground((Color) p);
            fillShape(g2);
        }
        if (strokePaint != null) {
            g2.setColor((Color) strokePaint);
            drawShape(g2);
        }
        if (inverseXForm != null) {
            g2.transform(inverseXForm);
        }
    }

    // Strokes the shape, insetting primitive shapes by half the (transformed)
    // line width so the stroke stays within the cached bounds.
    private void drawShape(final SWTGraphics2D g2) {
        final double lineWidth = g2.getTransformedLineWidth();
        if (shape instanceof Rectangle2D) {
            g2.drawRect(shapePts[0] + lineWidth / 2, shapePts[1] + lineWidth / 2, shapePts[2] - lineWidth, shapePts[3]
                    - lineWidth);
        }
        else if (shape instanceof Ellipse2D) {
            g2.drawOval(shapePts[0] + lineWidth / 2, shapePts[1] + lineWidth / 2, shapePts[2] - lineWidth, shapePts[3]
                    - lineWidth);
        }
        else if (shape instanceof Arc2D) {
            g2.drawArc(shapePts[0] + lineWidth / 2, shapePts[1] + lineWidth / 2, shapePts[2] - lineWidth, shapePts[3]
                    - lineWidth, shapePts[4], shapePts[5]);
        }
        else if (shape instanceof RoundRectangle2D) {
            g2.drawRoundRect(shapePts[0] + lineWidth / 2, shapePts[1] + lineWidth / 2, shapePts[2] - lineWidth,
                    shapePts[3] - lineWidth, shapePts[4], shapePts[5]);
        }
        else {
            g2.draw(shape);
        }
    }

    // Fills the shape with the same half-line-width inset as drawShape.
    private void fillShape(final SWTGraphics2D g2) {
        final double lineWidth = g2.getTransformedLineWidth();
        if (shape instanceof Rectangle2D) {
            g2.fillRect(shapePts[0] + lineWidth / 2, shapePts[1] + lineWidth / 2, shapePts[2] - lineWidth, shapePts[3]
                    - lineWidth);
        }
        else if (shape instanceof Ellipse2D) {
            g2.fillOval(shapePts[0] + lineWidth / 2, shapePts[1] + lineWidth / 2, shapePts[2] - lineWidth, shapePts[3]
                    - lineWidth);
        }
        else if (shape instanceof Arc2D) {
            g2.fillArc(shapePts[0] + lineWidth / 2, shapePts[1] + lineWidth / 2, shapePts[2] - lineWidth, shapePts[3]
                    - lineWidth, shapePts[4], shapePts[5]);
        }
        else if (shape instanceof RoundRectangle2D) {
            g2.fillRoundRect(shapePts[0] + lineWidth / 2, shapePts[1] + lineWidth / 2, shapePts[2] - lineWidth,
                    shapePts[3] - lineWidth, shapePts[4], shapePts[5]);
        }
        else {
            g2.fill(shape);
        }
    }

    /**
     * Changes the underlying shape of this PSWTPath.
     *
     * @param newShape new associated shape of this PSWTPath
     */
    public void setShape(final Shape newShape) {
        // Defensive clone: the node must not be affected by later caller mutation.
        shape = cloneShape(newShape);
        origShape = shape;
        updateShapePoints(newShape);

        firePropertyChange(PROPERTY_CODE_PATH, PROPERTY_PATH, null, shape);
        updateBoundsFromPath();
        invalidatePaint();
    }

    /**
     * Updates the internal points used to draw the shape.
     *
     * NOTE(review): the instanceof checks test the parameter aShape, but the
     * values are read from the field 'shape'. This is safe when called from
     * setShape (both refer to the same geometry), but verify before calling
     * it directly with an unrelated shape.
     *
     * @param aShape shape to read points from
     */
    public void updateShapePoints(final Shape aShape) {
        if (aShape instanceof Rectangle2D) {
            if (shapePts == null || shapePts.length < 4) {
                shapePts = new double[4];
            }

            shapePts[0] = ((Rectangle2D) shape).getX();
            shapePts[1] = ((Rectangle2D) shape).getY();
            shapePts[2] = ((Rectangle2D) shape).getWidth();
            shapePts[3] = ((Rectangle2D) shape).getHeight();
        }
        else if (aShape instanceof Ellipse2D) {
            if (shapePts == null || shapePts.length < 4) {
                shapePts = new double[4];
            }

            shapePts[0] = ((Ellipse2D) shape).getX();
            shapePts[1] = ((Ellipse2D) shape).getY();
            shapePts[2] = ((Ellipse2D) shape).getWidth();
            shapePts[3] = ((Ellipse2D) shape).getHeight();
        }
        else if (aShape instanceof Arc2D) {
            if (shapePts == null || shapePts.length < 6) {
                shapePts = new double[6];
            }

            shapePts[0] = ((Arc2D) shape).getX();
            shapePts[1] = ((Arc2D) shape).getY();
            shapePts[2] = ((Arc2D) shape).getWidth();
            shapePts[3] = ((Arc2D) shape).getHeight();
            shapePts[4] = ((Arc2D) shape).getAngleStart();
            shapePts[5] = ((Arc2D) shape).getAngleExtent();
        }
        else if (aShape instanceof RoundRectangle2D) {
            if (shapePts == null || shapePts.length < 6) {
                shapePts = new double[6];
            }

            shapePts[0] = ((RoundRectangle2D) shape).getX();
            shapePts[1] = ((RoundRectangle2D) shape).getY();
            shapePts[2] = ((RoundRectangle2D) shape).getWidth();
            shapePts[3] = ((RoundRectangle2D) shape).getHeight();
            shapePts[4] = ((RoundRectangle2D) shape).getArcWidth();
            shapePts[5] = ((RoundRectangle2D) shape).getArcHeight();
        }
        else {
            // Arbitrary shapes are flattened to a polyline approximation.
            shapePts = SWTShapeManager.shapeToPolyline(shape);
        }
    }

    /**
     * Clones the shape provided.
     *
     * @param aShape shape to be cloned
     *
     * @return a cloned version of the provided shape
     */
    public Shape cloneShape(final Shape aShape) {
        if (aShape instanceof Rectangle2D) {
            return new PBounds((Rectangle2D) aShape);
        }
        else if (aShape instanceof Ellipse2D) {
            final Ellipse2D e2 = (Ellipse2D) aShape;
            return new Ellipse2D.Double(e2.getX(), e2.getY(), e2.getWidth(), e2.getHeight());
        }
        else if (aShape instanceof Arc2D) {
            final Arc2D a2 = (Arc2D) aShape;
            return new Arc2D.Double(a2.getX(), a2.getY(), a2.getWidth(), a2.getHeight(), a2.getAngleStart(), a2
                    .getAngleExtent(), a2.getArcType());
        }
        else if (aShape instanceof RoundRectangle2D) {
            final RoundRectangle2D r2 = (RoundRectangle2D) aShape;
            return new RoundRectangle2D.Double(r2.getX(), r2.getY(), r2.getWidth(), r2.getHeight(), r2.getArcWidth(),
                    r2.getArcHeight());
        }
        else if (aShape instanceof Line2D) {
            final Line2D l2 = (Line2D) aShape;
            return new Line2D.Double(l2.getP1(), l2.getP2());
        }
        else {
            // Fallback: copy any other shape into a fresh GeneralPath.
            final GeneralPath aPath = new GeneralPath();
            aPath.append(aShape, false);
            return aPath;
        }
    }

    /**
     * Resets the path to a rectangle with the dimensions and position provided.
     *
     * @param x left of the rectangle
     * @param y top of the rectangle
     * @param width width of the rectangle
     * @param height height of the rectangle
     */
    public void setPathToRectangle(final float x, final float y, final float width, final float height) {
        TEMP_RECTANGLE.setFrame(x, y, width, height);
        setShape(TEMP_RECTANGLE);
    }

    /**
     * Resets the path to a rectangle with the dimensions and position provided.
* * @param x left of the rectangle * @param y top of te rectangle * @param width width of the rectangle * @param height height of the rectangle * @param arcWidth width of arc in the corners of the rectangle * @param arcHeight height of arc in the corners of the rectangle */ public void setPathToRoundRectangle(final float x, final float y, final float width, final float height, final float arcWidth, final float arcHeight) { TEMP_ROUNDRECTANGLE.setRoundRect(x, y, width, height, arcWidth, arcHeight); setShape(TEMP_ROUNDRECTANGLE); } /** * Resets the path to an ellipse positioned at the coordinate provided with * the dimensions provided. * * @param x left of the ellipse * @param y top of the ellipse * @param width width of the ellipse * @param height height of the ellipse */ public void setPathToEllipse(final float x, final float y, final float width, final float height) { TEMP_ELLIPSE.setFrame(x, y, width, height); setShape(TEMP_ELLIPSE); } /** * Sets the path to a sequence of segments described by the points. * * @param points points to that lie along the generated path */ public void setPathToPolyline(final Point2D[] points) { final GeneralPath path = new GeneralPath(); path.reset(); path.moveTo((float) points[0].getX(), (float) points[0].getY()); for (int i = 1; i < points.length; i++) { path.lineTo((float) points[i].getX(), (float) points[i].getY()); } setShape(path); } /** * Sets the path to a sequence of segments described by the point components * provided. * * @param xp the x components of the points along the path * @param yp the y components of the points along the path */ public void setPathToPolyline(final float[] xp, final float[] yp) { final GeneralPath path = new GeneralPath(); path.reset(); path.moveTo(xp[0], yp[0]); for (int i = 1; i < xp.length; i++) { path.lineTo(xp[i], yp[i]); } setShape(path); } /** * Return the center of this SWT path node, based on its bounds. 
* * @return the center of this SWT path node, based on its bounds */ public Point2D getCenter() { PBounds bounds = getBoundsReference(); return new Point2D.Double(bounds.x + (bounds.width / 2.0), bounds.y + (bounds.height / 2.0)); } /** * Return the transparency for this SWT path node. * * @return the transparency for this SWT path node */ public float getTransparency() { return transparency; } /** * Set the transparency for this SWT path node to <code>transparency</code>. * * @param transparency transparency, must be between <code>0.0f</code> and <code>1.0f</code> inclusive */ public void setTransparency(final float transparency) { if ((transparency < 0.0f) || (transparency > 1.0f)) { throw new IllegalArgumentException("transparency must be between 0.0f and 1.0f inclusive"); } this.transparency = transparency; } }
/* * #%L * Native ARchive plugin for Maven * %% * Copyright (C) 2002 - 2014 NAR Maven Plugin developers. * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package com.github.maven_nar.cpptasks.compiler; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.Enumeration; import java.util.LinkedHashSet; import java.util.Set; import java.util.Vector; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.types.Environment; import com.github.maven_nar.NarUtil; import com.github.maven_nar.OS; import com.github.maven_nar.cpptasks.CCTask; import com.github.maven_nar.cpptasks.CUtil; import com.github.maven_nar.cpptasks.LinkerDef; import com.github.maven_nar.cpptasks.ProcessorDef; import com.github.maven_nar.cpptasks.ProcessorParam; import com.github.maven_nar.cpptasks.TargetDef; import com.github.maven_nar.cpptasks.VersionInfo; import com.github.maven_nar.cpptasks.types.CommandLineArgument; import com.github.maven_nar.cpptasks.types.LibrarySet; /** * An abstract Linker implementation that performs the link via an external * command. 
* * @author Adam Murdoch */ public abstract class CommandLineLinker extends AbstractLinker { private String command; private Environment env = null; private String identifier; private final String identifierArg; private final boolean isLibtool; private String[] librarySets; private final CommandLineLinker libtoolLinker; private final boolean newEnvironment = false; private final String outputSuffix; // FREEHEP private final int maxPathLength = 250; /** Creates a comand line linker invocation */ public CommandLineLinker(final String command, final String identifierArg, final String[] extensions, final String[] ignoredExtensions, final String outputSuffix, final boolean isLibtool, final CommandLineLinker libtoolLinker) { super(extensions, ignoredExtensions); this.command = command; this.identifierArg = identifierArg; this.outputSuffix = outputSuffix; this.isLibtool = isLibtool; this.libtoolLinker = libtoolLinker; } protected void addBase(final CCTask task, final long base, final Vector<String> args) { // NB: Do nothing by default. } protected void addEntry(final CCTask task, final String entry, final Vector<String> args) { // NB: Do nothing by default. } protected void addFixed(final CCTask task, final Boolean fixed, final Vector<String> args) { // NB: Do nothing by default. } protected void addImpliedArgs(final CCTask task, final boolean debug, final LinkType linkType, final Vector<String> args) { // NB: Do nothing by default. } protected void addIncremental(final CCTask task, final boolean incremental, final Vector<String> args) { // NB: Do nothing by default. 
} protected void addLibraryDirectory(final File libraryDirectory, final Vector<String> preargs) { try { if (libraryDirectory != null && libraryDirectory.exists()) { final File currentDir = new File(".").getParentFile(); String path = libraryDirectory.getCanonicalPath(); if (currentDir != null) { final String currentPath = currentDir.getCanonicalPath(); path = CUtil.getRelativePath(currentPath, libraryDirectory); } addLibraryPath(preargs, path); } } catch (final IOException e) { throw new RuntimeException("Unable to add library path: " + libraryDirectory); } } protected void addLibraryPath(final Vector<String> preargs, final String path) { } // // Windows processors handle these through file list // protected String[] addLibrarySets(final CCTask task, final LibrarySet[] libsets, final Vector<String> preargs, final Vector<String> midargs, final Vector<String> endargs) { return null; } protected void addMap(final CCTask task, final boolean map, final Vector<String> args) { // NB: Do nothing by default. } protected void addStack(final CCTask task, final int stack, final Vector<String> args) { // NB: Do nothing by default. 
  }

  /**
   * Builds the linker configuration by merging the specific linker
   * definition with the inherited processor definitions: command line
   * arguments, de-duplicated library directories, processor parameters and
   * subclass-contributed switches.
   */
  @Override
  protected LinkerConfiguration createConfiguration(final CCTask task, final LinkType linkType,
      final ProcessorDef[] baseDefs, final LinkerDef specificDef, final TargetDef targetPlatform,
      final VersionInfo versionInfo) {

    // Argument buckets: start / middle / end of the command line.
    final Vector<String> preargs = new Vector<String>();
    final Vector<String> midargs = new Vector<String>();
    final Vector<String> endargs = new Vector<String>();
    final Vector<String>[] args = new Vector[] {
        preargs, midargs, endargs
    };

    // defaultProviders[0] is the most specific definition; base definitions
    // follow, so iterating from the end applies the most generic first.
    final LinkerDef[] defaultProviders = new LinkerDef[baseDefs.length + 1];
    defaultProviders[0] = specificDef;
    for (int i = 0; i < baseDefs.length; i++) {
      defaultProviders[i + 1] = (LinkerDef) baseDefs[i];
    }
    //
    // add command line arguments inherited from <cc> element
    // any "extends" and finally the specific CompilerDef
    CommandLineArgument[] commandArgs;
    for (int i = defaultProviders.length - 1; i >= 0; i--) {
      final LinkerDef linkerDef = defaultProviders[i];
      commandArgs = linkerDef.getActiveProcessorArgs();
      for (final CommandLineArgument commandArg : commandArgs) {
        args[commandArg.getLocation()].addElement(commandArg.getValue());
      }
    }

    // Collect library directories, de-duplicated while preserving order.
    final Set<File> libraryDirectories = new LinkedHashSet<File>();
    for (int i = defaultProviders.length - 1; i >= 0; i--) {
      final LinkerDef linkerDef = defaultProviders[i];
      for (final File libraryDirectory : linkerDef.getLibraryDirectories()) {
        if (libraryDirectories.add(libraryDirectory)) {
          addLibraryDirectory(libraryDirectory, preargs);
        }
      }
    }

    final Vector<ProcessorParam> params = new Vector<ProcessorParam>();
    //
    // add command line arguments inherited from <cc> element
    // any "extends" and finally the specific CompilerDef
    ProcessorParam[] paramArray;
    for (int i = defaultProviders.length - 1; i >= 0; i--) {
      paramArray = defaultProviders[i].getActiveProcessorParams();
      for (final ProcessorParam element : paramArray) {
        params.add(element);
      }
    }
    paramArray = params.toArray(new ProcessorParam[params.size()]);

    final boolean debug = specificDef.getDebug(baseDefs, 0);

    final String startupObject = getStartupObject(linkType);

    // Let subclasses contribute their linker-specific switches.
    addImpliedArgs(task, debug, linkType, preargs);
    addIncremental(task, specificDef.getIncremental(defaultProviders, 1), preargs);
    addFixed(task, specificDef.getFixed(defaultProviders, 1), preargs);
    addMap(task, specificDef.getMap(defaultProviders, 1), preargs);
    addBase(task, specificDef.getBase(defaultProviders, 1), preargs);
    addStack(task, specificDef.getStack(defaultProviders, 1), preargs);
    addEntry(task, specificDef.getEntry(defaultProviders, 1), preargs);

    String[] libnames = null;
    final LibrarySet[] libsets = specificDef.getActiveLibrarySets(defaultProviders, 1);
    // FREEHEP call at all times
    // if (libsets.length > 0) {
    libnames = addLibrarySets(task, libsets, preargs, midargs, endargs);
    // }

    // The configuration id is the linker identifier followed by every
    // argument; it is used for rebuild/up-to-date detection.
    final StringBuffer buf = new StringBuffer(getIdentifier());
    for (int i = 0; i < 3; i++) {
      final Enumeration<String> argenum = args[i].elements();
      while (argenum.hasMoreElements()) {
        buf.append(' ');
        buf.append(argenum.nextElement());
      }
    }
    final String configId = buf.toString();

    // options[0] holds pre+mid arguments, options[1] the end arguments.
    final String[][] options = new String[][] {
        new String[args[0].size() + args[1].size()], new String[args[2].size()]
    };
    args[0].copyInto(options[0]);
    final int offset = args[0].size();
    for (int i = 0; i < args[1].size(); i++) {
      options[0][i + offset] = args[1].elementAt(i);
    }
    args[2].copyInto(options[1]);

    // if this linker doesn't have an env, and there is a more generic
    // definition for environment, use it.
    if (null != specificDef.getEnv() && null == this.env) {
      this.env = specificDef.getEnv();
    }
    for (final ProcessorDef processorDef : baseDefs) {
      final Environment environment = processorDef.getEnv();
      if (null != environment && null == this.env) {
        this.env = environment;
      }
    }
    final boolean rebuild = specificDef.getRebuild(baseDefs, 0);
    final boolean map = specificDef.getMap(defaultProviders, 1);
    final String toolPath = specificDef.getToolPath();

    // task.log("libnames:"+libnames.length, Project.MSG_VERBOSE);
    return new CommandLineLinkerConfiguration(this, configId, options, paramArray, rebuild, map, debug, libnames,
        startupObject, toolPath);
  }

  /**
   * Allows derived linker to decorate linker option. Override by GccLinker to
   * prepend a "-Wl," to pass option through gcc to linker.
   *
   * @param buf
   *          buffer that may be used and abused in the decoration process,
   *          must not be null.
   * @param arg
   *          linker argument
   */
  protected String decorateLinkerOption(final StringBuffer buf, final String arg) {
    return arg;
  }

  /** Returns the raw command name (without any tool path). */
  protected final String getCommand() {
    return this.command;
  }

  /** Returns the switch used to pass a response (command) file to the linker. */
  protected abstract String getCommandFileSwitch(String commandFile);

  /**
   * Returns the command, resolved against the configured tool path when one
   * is present; falls back to the absolute path if canonicalization fails.
   */
  public String getCommandWithPath(final CommandLineLinkerConfiguration config) {
    if (config.getCommandPath() != null) {
      final File command = new File(config.getCommandPath(), this.getCommand());
      try {
        return command.getCanonicalPath();
      } catch (final IOException e) {
        e.printStackTrace();
        return command.getAbsolutePath();
      }
    } else {
      return this.getCommand();
    }
  }

  @Override
  public String getIdentifier() {
    // Lazily computed; when identifierArg is set the linker executable is
    // invoked with that argument to obtain its version string.
    if (this.identifier == null) {
      if (this.identifierArg == null) {
        this.identifier = getIdentifier(new String[] {
            this.command
        }, this.command);
      } else {
        this.identifier = getIdentifier(new String[] {
            this.command, this.identifierArg
        }, this.command);
      }
    }
    return this.identifier;
  }

  /** Returns the libtool variant of this linker, or this linker when none exists. */
  public final CommandLineLinker getLibtoolLinker() {
    if (this.libtoolLinker != null) {
      return this.libtoolLinker;
    }
    return this;
  }

  /** Maximum command line length accepted by the platform/toolchain. */
  protected abstract int getMaximumCommandLength();

  @Override
  public String[] getOutputFileNames(final String baseName, final VersionInfo versionInfo) {
    return new String[] {
        baseName + this.outputSuffix
    };
  }

  protected String[] getOutputFileSwitch(final CCTask task, final String outputFile) {
    // FREEHEP BEGIN
    // Fail fast when the output path exceeds the Windows path limit.
    if (isWindows() && outputFile.length() > this.maxPathLength) {
      throw new BuildException("Absolute path too long, " + outputFile.length() + " > " + this.maxPathLength + ": '"
          + outputFile);
    }
    // FREEHEP END
    return getOutputFileSwitch(outputFile);
  }

  protected abstract String[] getOutputFileSwitch(String outputFile);

  /** Returns the startup object file for the link type, or null when none is needed. */
  protected String getStartupObject(final LinkType linkType) {
    return null;
  }

  /**
   * Performs a link using a command line linker
   *
   */
  public void link(final CCTask task, final File outputFile, final String[] sourceFiles,
      final CommandLineLinkerConfiguration config) throws BuildException {
    final File parentDir = new File(outputFile.getParent());
    String parentPath;
    try {
      parentPath = parentDir.getCanonicalPath();
    } catch (final IOException ex) {
      parentPath = parentDir.getAbsolutePath();
    }
    String[] execArgs = prepareArguments(task, parentPath, outputFile.getName(), sourceFiles, config);
    // Estimate the full command line length (+1 per argument separator).
    int commandLength = 0;
    for (final String execArg : execArgs) {
      commandLength += execArg.length() + 1;
    }

    //
    // if command length exceeds maximum
    // then create a temporary
    // file containing everything but the command name
    if (commandLength >= this.getMaximumCommandLength()) {
      try {
        execArgs = prepareResponseFile(outputFile, execArgs);
      } catch (final IOException ex) {
        throw new BuildException(ex);
      }
    }

    final int retval = runCommand(task, parentDir, execArgs);
    //
    // if the process returned a failure code then
    // throw an BuildException
    //
    if (retval != 0) {
      //
      // construct the exception
      //
      throw new BuildException(getCommandWithPath(config) + " failed with return code " + retval, task.getLocation());
    }
  }

  /**
   * Prepares argument list for exec command.
Will return null * if command line would exceed allowable command line buffer. * * @param task * compilation task. * @param outputFile * linker output file * @param sourceFiles * linker input files (.obj, .o, .res) * @param config * linker configuration * @return arguments for runTask */ protected String[] prepareArguments(final CCTask task, final String outputDir, final String outputFile, final String[] sourceFiles, final CommandLineLinkerConfiguration config) { final String[] preargs = config.getPreArguments(); final String[] endargs = config.getEndArguments(); final String outputSwitch[] = getOutputFileSwitch(task, outputFile); int allArgsCount = preargs.length + 1 + outputSwitch.length + sourceFiles.length + endargs.length; if (this.isLibtool) { allArgsCount++; } final String[] allArgs = new String[allArgsCount]; int index = 0; if (this.isLibtool) { allArgs[index++] = "libtool"; } allArgs[index++] = getCommandWithPath(config); final StringBuffer buf = new StringBuffer(); for (final String prearg : preargs) { allArgs[index++] = task.isDecorateLinkerOptions() ? decorateLinkerOption(buf, prearg) : prearg; } for (final String element : outputSwitch) { allArgs[index++] = element; } for (final String sourceFile : sourceFiles) { allArgs[index++] = prepareFilename(buf, outputDir, sourceFile); } for (final String endarg : endargs) { allArgs[index++] = task.isDecorateLinkerOptions() ? decorateLinkerOption(buf, endarg) : endarg; } return allArgs; } /** * Processes filename into argument form * */ protected String prepareFilename(final StringBuffer buf, final String outputDir, final String sourceFile) { // FREEHEP BEGIN exit if absolute path is too long. Max length on relative // paths in windows is even shorter. 
if (isWindows() && sourceFile.length() > this.maxPathLength) { throw new BuildException("Absolute path too long, " + sourceFile.length() + " > " + this.maxPathLength + ": '" + sourceFile); } // FREEHEP END return quoteFilename(buf, sourceFile); } /** * Prepares argument list to execute the linker using a * response file. * * @param outputFile * linker output file * @param args * output of prepareArguments * @return arguments for runTask */ protected String[] prepareResponseFile(final File outputFile, final String[] args) throws IOException { final String baseName = outputFile.getName(); final File commandFile = new File(outputFile.getParent(), baseName + ".rsp"); final FileWriter writer = new FileWriter(commandFile); int execArgCount = 1; if (this.isLibtool) { execArgCount++; } final String[] execArgs = new String[execArgCount + 1]; for (int i = 0; i < execArgCount; i++) { execArgs[i] = args[i]; } execArgs[execArgCount] = getCommandFileSwitch(commandFile.toString()); for (int i = execArgCount; i < args.length; i++) { // // if embedded space and not quoted then // quote argument if (args[i].indexOf(" ") >= 0 && args[i].charAt(0) != '\"') { writer.write('\"'); writer.write(args[i]); writer.write("\"\n"); } else { writer.write(args[i]); writer.write('\n'); } } writer.close(); return execArgs; } protected String quoteFilename(final StringBuffer buf, final String filename) { if (filename.indexOf(' ') >= 0) { buf.setLength(0); buf.append('\"'); buf.append(filename); buf.append('\"'); return buf.toString(); } return filename; } /** * This method is exposed so test classes can overload * and test the arguments without actually spawning the * compiler */ protected int runCommand(final CCTask task, final File workingDir, final String[] cmdline) throws BuildException { return CUtil.runCommand(task, workingDir, cmdline, this.newEnvironment, this.env); } protected final void setCommand(final String command) { this.command = command; } }
// Copyright 2010-2016, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
package org.mozc.android.inputmethod.japanese.preference;

import org.mozc.android.inputmethod.japanese.resources.R;

import android.app.Dialog;
import android.app.AlertDialog.Builder;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnMultiChoiceClickListener;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.res.TypedArray;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.preference.DialogPreference;
import android.util.AttributeSet;

/**
 * Preference widget with a dialog containing multi selectable items.
 *
 * Unlike a standard preference, each checkbox item persists to its own
 * SharedPreferences boolean key (one key per entry), so persistence is keyed
 * by {@code keyList} rather than this preference's single key.
 */
public class MozcMultiSelectListPreference extends DialogPreference {

  /**
   * State of the current dialog.
   */
  private static class SavedState extends BaseSavedState {
    // Whether the dialog was showing when the state was captured.
    boolean isDialogShowing;
    // The dialog's own saved instance state, used to re-show it on restore.
    Bundle dialogBundle;

    public SavedState(Parcel source) {
      super(source);
      isDialogShowing = (source.readInt() != 0);
      dialogBundle = source.readBundle();
    }

    public SavedState(Parcelable superState) {
      super(superState);
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
      super.writeToParcel(dest, flags);
      dest.writeInt(isDialogShowing ? 1 : 0);
      dest.writeBundle(dialogBundle);
    }

    @SuppressWarnings({"hiding", "unused"})
    public static final Creator<SavedState> CREATOR = new Creator<SavedState>() {
      @Override
      public SavedState createFromParcel(Parcel in) {
        return new SavedState(in);
      }

      @Override
      public SavedState[] newArray(int size) {
        return new SavedState[size];
      }
    };
  }

  /** A list of entries shown on the dialog. */
  private CharSequence[] entryList;

  /** A list of SharedPreferences' keys. */
  private CharSequence[] keyList;

  /** A list of current values. */
  private boolean[] valueList;

  /** A copy of values for dialog. This may be just discarded when user clicks "cancel." */
  private boolean[] dialogValueList;

  public MozcMultiSelectListPreference(Context context, AttributeSet attrs) {
    super(context, attrs);
    initialize(attrs);
  }

  public MozcMultiSelectListPreference(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
    initialize(attrs);
  }

  // Reads the entry labels and per-item preference keys from XML attributes.
  private void initialize(AttributeSet attrs) {
    TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.MozcMultiSelectListPreference);
    entryList = a.getTextArray(R.styleable.MozcMultiSelectListPreference_entries);
    keyList = a.getTextArray(R.styleable.MozcMultiSelectListPreference_entryKeys);
    a.recycle();
  }

  /**
   * Sets the SharedPreferences keys, one per entry.
   *
   * @throws IllegalStateException if the dialog is currently showing
   */
  public void setKeys(CharSequence[] keyList) {
    if (isDialogShowing()) {
      throw new IllegalStateException("Keys cannot be set when dialog is showing.");
    }
    this.keyList = keyList;
  }

  /**
   * Sets the entry labels shown in the dialog.
   *
   * @throws IllegalStateException if the dialog is currently showing
   */
  public void setEntries(CharSequence[] entryList) {
    if (isDialogShowing()) {
      throw new IllegalStateException("Entries cannot be set when dialog is showing.");
    }
    this.entryList = entryList;
  }

  /**
   * Sets the checked state for every entry and persists the changed ones.
   *
   * @throws IllegalStateException if the dialog is currently showing
   */
  public void setValues(boolean[] valueList) {
    if (isDialogShowing()) {
      throw new IllegalStateException("Values cannot be set when dialog is showing.");
    }
    boolean[] oldValueList = this.valueList;
    // Defensive copy so the caller's array cannot mutate internal state.
    this.valueList = valueList.clone();
    persistBooleanArray(keyList, oldValueList, valueList);
  }

  /** Returns a defensive copy of the current checked states. */
  public boolean[] getValues() {
    return valueList.clone();
  }

  // Writes only the values that actually changed (or all of them when there
  // was no previous value list) to SharedPreferences.
  private boolean persistBooleanArray(
      CharSequence[] keyList, boolean[] oldValueList, boolean[] valueList) {
    if (!shouldPersist()) {
      return false;
    }

    Editor editor = getSharedPreferences().edit();
    for (int i = 0; i < keyList.length; ++i) {
      if (oldValueList == null || oldValueList[i] != valueList[i]) {
        editor.putBoolean(keyList[i].toString(), valueList[i]);
      }
    }
    editor.commit();
    return true;
  }

  @Override
  protected boolean shouldPersist() {
    // Look at keyList instead of Key.
    return getPreferenceManager() != null && isPersistent() && keyList != null;
  }

  private boolean isDialogShowing() {
    Dialog dialog = getDialog();
    return dialog != null && dialog.isShowing();
  }

  @Override
  protected void onPrepareDialogBuilder(Builder builder) {
    super.onPrepareDialogBuilder(builder);

    // The three parallel arrays must be present and the same length before
    // the dialog can be built.
    if (entryList == null || keyList == null || valueList == null) {
      throw new IllegalStateException();
    }
    if (entryList.length != keyList.length || entryList.length != valueList.length) {
      throw new IllegalStateException(
          "All entryList, keyList and valueList must have the same number of elements: "
              + entryList.length + ", " + keyList.length + ", " + valueList.length);
    }

    // Set multi selectable items and its handler. Edits go to a working copy
    // so cancel can discard them.
    dialogValueList = valueList.clone();
    builder.setMultiChoiceItems(entryList, dialogValueList, new OnMultiChoiceClickListener() {
      @Override
      public void onClick(DialogInterface dialog, int which, boolean isChecked) {
        dialogValueList[which] = isChecked;
      }
    });
  }

  @Override
  protected void onDialogClosed(boolean positiveResult) {
    super.onDialogClosed(positiveResult);
    if (positiveResult) {
      // If user tap OK, set the value.
      setValues(dialogValueList);
    }
  }

  @Override
  protected Object onGetDefaultValue(TypedArray a, int index) {
    return toBooleanArray(a.getTextArray(index));
  }

  @Override
  protected void onSetInitialValue(boolean restoreValue, Object defaultValue) {
    if (!shouldPersist()) {
      // restoreValue may be the result of look up for the key, but in this class
      // what we need to check is keyList.
      setValues((boolean[]) defaultValue);
      return;
    }

    // Use persisted values if exist, otherwise use default value.
    boolean[] valueList = ((boolean[]) defaultValue).clone();
    SharedPreferences sharedPreferences = getSharedPreferences();
    for (int i = 0; i < keyList.length; ++i) {
      if (sharedPreferences.contains(keyList[i].toString())) {
        valueList[i] = sharedPreferences.getBoolean(keyList[i].toString(), valueList[i]);
      }
    }
    setValues(valueList);
  }

  /** Parses each text in {@code array} to boolean value, and returns as an array. */
  private static boolean[] toBooleanArray(CharSequence[] array) {
    if (array == null) {
      return null;
    }

    boolean[] result = new boolean[array.length];
    for (int i = 0; i < result.length; ++i) {
      if (array[i] != null) {
        result[i] = Boolean.parseBoolean(array[i].toString());
      }
    }
    return result;
  }

  @Override
  protected Parcelable onSaveInstanceState() {
    Parcelable superState = super.onSaveInstanceState();
    if (!isDialogShowing()) {
      return superState;
    }

    // Now the dialog is showing. Keep the current state.
    SavedState state = new SavedState(superState);
    state.isDialogShowing = true;
    state.dialogBundle = getDialog().onSaveInstanceState();
    return state;
  }

  @Override
  protected void onRestoreInstanceState(Parcelable parcelable) {
    if (!(parcelable instanceof SavedState)) {
      super.onRestoreInstanceState(parcelable);
      return;
    }

    SavedState state = SavedState.class.cast(parcelable);
    super.onRestoreInstanceState(state.getSuperState());
    if (state.isDialogShowing) {
      // The dialog was shown when the state was saved. Re-show the dialog.
      showDialog(state.dialogBundle);
    }
  }
}