gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright (C) 2015-2015 The Helenus Driver Project Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.helenus.driver.impl;

import java.util.List;

import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.policies.RetryPolicy;
import com.google.common.util.concurrent.ListenableFuture;

import org.helenus.driver.GenericStatement;

/**
 * The <code>ForwardingStatementImpl</code> class defines a utility class to
 * create a statement that encapsulates another one. Every operation is
 * forwarded verbatim to the wrapped statement; subclasses may override
 * individual methods to intercept specific calls.
 *
 * @copyright 2015-2015 The Helenus Driver Project Authors
 *
 * @author The Helenus Driver Project Authors
 * @version 1 - Jan 19, 2015 - paouelle - Creation
 *
 * @param <R> The type of result returned when executing this statement
 * @param <F> The type of future result returned when executing this statement
 * @param <T> The type of POJO associated with this statement
 * @param <S> The type of statement encapsulated
 *
 * @since 1.0
 */
public abstract class ForwardingStatementImpl<R, F extends ListenableFuture<R>, T, S extends StatementImpl<R, F, T>>
  extends StatementImpl<R, F, T> {
  /**
   * Holds the encapsulated statement.
   *
   * @author paouelle
   */
  protected S statement;

  /**
   * Instantiates a new <code>ForwardingStatementImpl</code> object.
   *
   * @author paouelle
   *
   * @param statement the encapsulated statement
   */
  protected ForwardingStatementImpl(S statement) {
    super(statement);
    this.statement = statement;
  }

  /** {@inheritDoc} */
  @Override
  protected StringBuilder[] buildQueryStrings() {
    return statement.buildQueryStrings();
  }

  /**
   * {@inheritDoc}
   * <p>
   * BUG FIX: this method previously delegated to
   * {@code statement.appendGroupSubType(builder)} — a copy/paste error that
   * appended the group <em>sub</em>-type where the group type was requested.
   * It now forwards to the same-named method like every other delegate here.
   */
  @Override
  protected void appendGroupType(StringBuilder builder) {
    statement.appendGroupType(builder);
  }

  /** {@inheritDoc} */
  @Override
  protected void appendGroupSubType(StringBuilder builder) {
    statement.appendGroupSubType(builder);
  }

  /** {@inheritDoc} */
  @Override
  protected void appendOptions(StringBuilder builder) {
    statement.appendOptions(builder);
  }

  /** {@inheritDoc} */
  @Override
  protected List<StatementImpl<?, ?, ?>> buildStatements() {
    return statement.buildStatements();
  }

  /** {@inheritDoc} */
  @Override
  protected StringBuilder buildQueryString() {
    return statement.buildQueryString();
  }

  /** {@inheritDoc} */
  @Override
  protected void setDirty() {
    statement.setDirty();
  }

  /** {@inheritDoc} */
  @Override
  protected void setDirty(boolean recurse) {
    statement.setDirty(recurse);
  }

  /** {@inheritDoc} */
  @Override
  protected boolean isDirty() {
    return statement.isDirty();
  }

  /** {@inheritDoc} */
  @Override
  protected int simpleSize() {
    return statement.simpleSize();
  }

  /** {@inheritDoc} */
  @Override
  protected boolean isCounterOp() {
    return statement.isCounterOp();
  }

  /** {@inheritDoc} */
  @Override
  protected void setCounterOp(boolean isCounterOp) {
    statement.setCounterOp(isCounterOp);
  }

  /** {@inheritDoc} */
  @Override
  public Class<T> getObjectClass() {
    return statement.getObjectClass();
  }

  /** {@inheritDoc} */
  @Override
  public ClassInfoImpl<T>.Context getContext() {
    return statement.getContext();
  }

  /** {@inheritDoc} */
  @Override
  public ClassInfoImpl<T>.POJOContext getPOJOContext() {
    return statement.getPOJOContext();
  }

  /** {@inheritDoc} */
  @Override
  public String getKeyspace() {
    return statement.getKeyspace();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> enable() {
    return statement.enable();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> disable() {
    return statement.disable();
  }

  /** {@inheritDoc} */
  @Override
  public boolean isEnabled() {
    return statement.isEnabled();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> setConsistencyLevel(ConsistencyLevel consistency) {
    return statement.setConsistencyLevel(consistency);
  }

  /** {@inheritDoc} */
  @Override
  public ConsistencyLevel getConsistencyLevel() {
    return statement.getConsistencyLevel();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> setSerialConsistencyLevel(
    ConsistencyLevel serialConsistency
  ) {
    return statement.setSerialConsistencyLevel(serialConsistency);
  }

  /** {@inheritDoc} */
  @Override
  public ConsistencyLevel getSerialConsistencyLevel() {
    return statement.getSerialConsistencyLevel();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> enableTracing() {
    return statement.enableTracing();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> enableTracing(String prefix) {
    return statement.enableTracing(prefix);
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> disableTracing() {
    return statement.disableTracing();
  }

  /** {@inheritDoc} */
  @Override
  public boolean isTracing() {
    return statement.isTracing();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> enableErrorTracing() {
    return statement.enableErrorTracing();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> enableErrorTracing(String prefix) {
    return statement.enableErrorTracing(prefix);
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> disableErrorTracing() {
    return statement.disableErrorTracing();
  }

  /** {@inheritDoc} */
  @Override
  public boolean isErrorTracing() {
    return statement.isErrorTracing();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> setRetryPolicy(RetryPolicy policy) {
    return statement.setRetryPolicy(policy);
  }

  /** {@inheritDoc} */
  @Override
  public RetryPolicy getRetryPolicy() {
    return statement.getRetryPolicy();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> setFetchSize(int fetchSize) {
    return statement.setFetchSize(fetchSize);
  }

  /** {@inheritDoc} */
  @Override
  public int getFetchSize() {
    return statement.getFetchSize();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> setDefaultTimestamp(long defaultTimestamp) {
    return statement.setDefaultTimestamp(defaultTimestamp);
  }

  /** {@inheritDoc} */
  @Override
  public long getDefaultTimestamp() {
    return statement.getDefaultTimestamp();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> setReadTimeoutMillis(int readTimeoutMillis) {
    return statement.setReadTimeoutMillis(readTimeoutMillis);
  }

  /** {@inheritDoc} */
  @Override
  public int getReadTimeoutMillis() {
    return statement.getReadTimeoutMillis();
  }

  /** {@inheritDoc} */
  @Override
  public GenericStatement<R, F> setIdempotent(boolean idempotent) {
    return statement.setIdempotent(idempotent);
  }

  /** {@inheritDoc} */
  @Override
  public Boolean isIdempotent() {
    return statement.isIdempotent();
  }

  /** {@inheritDoc} */
  @Override
  public String getQueryString() {
    return statement.getQueryString();
  }

  /** {@inheritDoc} */
  @Override
  public <U> U getUserData() {
    return statement.getUserData();
  }

  /** {@inheritDoc} */
  @Override
  public <U> void setUserData(U data) {
    statement.setUserData(data);
  }

  /** {@inheritDoc} */
  @Override
  public R execute() {
    return statement.execute();
  }

  /** {@inheritDoc} */
  @Override
  public F executeAsync() {
    return statement.executeAsync();
  }

  /** {@inheritDoc} */
  @Override
  public ResultSet executeRaw() {
    return statement.executeRaw();
  }

  /** {@inheritDoc} */
  @Override
  public ResultSetFuture executeAsyncRaw() {
    return statement.executeAsyncRaw();
  }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.inspections;

import com.google.common.collect.ImmutableMap;
import com.intellij.codeInsight.controlflow.ControlFlowUtil;
import com.intellij.codeInsight.controlflow.Instruction;
import com.intellij.codeInspection.*;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.codeInsight.controlflow.ControlFlowCache;
import com.jetbrains.python.codeInsight.controlflow.ReadWriteInstruction;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.codeInsight.dataflow.scope.Scope;
import com.jetbrains.python.codeInsight.dataflow.scope.ScopeUtil;
import com.jetbrains.python.documentation.doctest.PyDocReference;
import com.jetbrains.python.inspections.quickfix.AddFieldQuickFix;
import com.jetbrains.python.inspections.quickfix.PyRemoveParameterQuickFix;
import com.jetbrains.python.inspections.quickfix.PyRemoveStatementQuickFix;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyAugAssignmentStatementNavigator;
import com.jetbrains.python.psi.impl.PyBuiltinCache;
import com.jetbrains.python.psi.impl.PyForStatementNavigator;
import com.jetbrains.python.psi.impl.PyImportStatementNavigator;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.search.PyOverridingMethodsSearch;
import com.jetbrains.python.psi.search.PySuperMethodsSearch;
import com.jetbrains.python.pyi.PyiUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;

import static com.jetbrains.python.psi.PyUtil.as;

/**
 * Inspection visitor that finds unused local variables, parameters, functions
 * and classes in Python code. It works in two passes over each scope's
 * control-flow graph: first every write access is recorded as potentially
 * unused ({@link #collectAllWrites}), then every read access marks the writes
 * reaching it as used ({@link #collectUsedReads} / {@link #analyzeReadsInScope}).
 * Finally {@link #registerProblems} reports everything still marked unused.
 *
 * @author oleg
 */
public class PyUnusedLocalInspectionVisitor extends PyInspectionVisitor {
  // User-configurable filters passed in from the inspection's settings panel.
  private final boolean myIgnoreTupleUnpacking;
  private final boolean myIgnoreLambdaParameters;
  private final boolean myIgnoreRangeIterationVariables;
  private final boolean myIgnoreVariablesStartingWithUnderscore;
  // Elements written but (so far) never read.
  private final HashSet<PsiElement> myUnusedElements;
  // Elements proven used; takes precedence over myUnusedElements.
  private final HashSet<PsiElement> myUsedElements;

  public PyUnusedLocalInspectionVisitor(@NotNull ProblemsHolder holder,
                                        @NotNull LocalInspectionToolSession session,
                                        boolean ignoreTupleUnpacking,
                                        boolean ignoreLambdaParameters,
                                        boolean ignoreRangeIterationVariables,
                                        boolean ignoreVariablesStartingWithUnderscore) {
    super(holder, session);
    myIgnoreTupleUnpacking = ignoreTupleUnpacking;
    myIgnoreLambdaParameters = ignoreLambdaParameters;
    myIgnoreRangeIterationVariables = ignoreRangeIterationVariables;
    myIgnoreVariablesStartingWithUnderscore = ignoreVariablesStartingWithUnderscore;
    myUnusedElements = new HashSet<>();
    myUsedElements = new HashSet<>();
  }

  @Override
  public void visitPyFunction(final PyFunction node) {
    // @overload stubs are declarations only — analyzing them would flag every parameter.
    if (!PyiUtil.isOverload(node, myTypeEvalContext)) {
      processScope(node);
    }
  }

  @Override
  public void visitPyLambdaExpression(final PyLambdaExpression node) {
    processScope(node);
  }

  @Override
  public void visitPyClass(PyClass node) {
    processScope(node);
  }

  // Runs the write-collection pass (except for class bodies and scopes calling
  // locals(), where "unused" cannot be determined) and the read-collection pass.
  private void processScope(final ScopeOwner owner) {
    if (owner.getContainingFile() instanceof PyExpressionCodeFragment) {
      return;
    }
    if (!(owner instanceof PyClass) && !callsLocals(owner)) {
      collectAllWrites(owner);
    }
    collectUsedReads(owner);
  }

  @Override
  public void visitPyStringLiteralExpression(PyStringLiteralExpression pyString) {
    // References inside injected fragments (e.g. f-strings/doctests) also count
    // as reads of names in the enclosing scope.
    final ScopeOwner owner = ScopeUtil.getScopeOwner(pyString);
    if (owner != null && !(owner instanceof PsiFile)) {
      final PsiElement instrAnchor = PyDocReference.getControlFlowAnchorForFString(pyString);
      if (instrAnchor == null) return;
      final Instruction[] instructions = ControlFlowCache.getControlFlow(owner).getInstructions();
      final int startInstruction = ControlFlowUtil.findInstructionNumberByElement(instructions, instrAnchor);
      if (startInstruction < 0) return;
      final Project project = pyString.getProject();
      final List<Pair<PsiElement, TextRange>> pairs = InjectedLanguageManager.getInstance(project).getInjectedPsiFiles(pyString);
      if (pairs != null) {
        for (Pair<PsiElement, TextRange> pair : pairs) {
          pair.getFirst().accept(new PyRecursiveElementVisitor() {
            @Override
            public void visitPyReferenceExpression(PyReferenceExpression expr) {
              final PyExpression qualifier = expr.getQualifier();
              if (qualifier != null) {
                qualifier.accept(this);
                return;
              }
              final String name = expr.getName();
              if (name != null) {
                analyzeReadsInScope(name, owner, instructions, startInstruction, pyString);
              }
            }
          });
        }
      }
    }
  }

  // Pass 1: record every write access (and every nested function definition)
  // in the scope as potentially unused, skipping names that can legitimately
  // go unread (globals, nonlocals, "_", qualified targets, imports, ...).
  private void collectAllWrites(ScopeOwner owner) {
    final Instruction[] instructions = ControlFlowCache.getControlFlow(owner).getInstructions();
    for (Instruction instruction : instructions) {
      final PsiElement element = instruction.getElement();
      if (element instanceof PyFunction && owner instanceof PyFunction) {
        // A decorator we don't understand may register the function somewhere.
        if (PyKnownDecoratorUtil.hasUnknownDecorator((PyFunction)element, myTypeEvalContext)) {
          continue;
        }
        if (!myUsedElements.contains(element)) {
          myUnusedElements.add(element);
        }
      }
      else if (instruction instanceof ReadWriteInstruction) {
        final ReadWriteInstruction readWriteInstruction = (ReadWriteInstruction)instruction;
        final ReadWriteInstruction.ACCESS access = readWriteInstruction.getAccess();
        if (!access.isWriteAccess()) {
          continue;
        }
        final String name = readWriteInstruction.getName();
        // Ignore empty, wildcards, global and nonlocal names
        final Scope scope = ControlFlowCache.getScope(owner);
        if (name == null || PyNames.UNDERSCORE.equals(name) || scope.isGlobal(name) || scope.isNonlocal(name)) {
          continue;
        }
        if (element instanceof PyTargetExpression && ((PyTargetExpression)element).isQualified()) {
          continue;
        }
        // Ignore underscore-prefixed parameters
        if (name.startsWith(PyNames.UNDERSCORE) && element instanceof PyParameter) {
          continue;
        }
        // Ignore elements out of scope
        if (element == null || !PsiTreeUtil.isAncestor(owner, element, false)) {
          continue;
        }
        // Ignore arguments of import statement
        if (PyImportStatementNavigator.getImportStatementByElement(element) != null) {
          continue;
        }
        if (PyAugAssignmentStatementNavigator.getStatementByTarget(element) != null) {
          continue;
        }
        if (parameterInMethodWithFixedSignature(owner, element)) {
          continue;
        }
        if (!myUsedElements.contains(element)) {
          myUnusedElements.add(element);
        }
      }
    }
  }

  // True when the element is a parameter of a dunder method whose signature is
  // fixed by the language (e.g. __eq__) — such parameters can't be removed.
  // __init__ is excluded since its extra parameters are user-defined.
  private static boolean parameterInMethodWithFixedSignature(@NotNull ScopeOwner owner, @NotNull PsiElement element) {
    if (owner instanceof PyFunction && element instanceof PyParameter) {
      final PyFunction function = (PyFunction)owner;
      final String functionName = function.getName();
      final LanguageLevel level = LanguageLevel.forElement(function);
      final ImmutableMap<String, PyNames.BuiltinDescription> builtinMethods =
        function.getContainingClass() != null ? PyNames.getBuiltinMethods(level) : PyNames.getModuleBuiltinMethods(level);
      return !PyNames.INIT.equals(functionName) && builtinMethods.containsKey(functionName);
    }
    return false;
  }

  // Pass 2: for every read access, walk the CFG backwards from it to find the
  // writes that reach it and mark them used. For augmented assignments
  // (read+write), the search starts at the enclosing statement instead.
  private void collectUsedReads(final ScopeOwner owner) {
    final Instruction[] instructions = ControlFlowCache.getControlFlow(owner).getInstructions();
    for (int i = 0; i < instructions.length; i++) {
      final Instruction instruction = instructions[i];
      if (instruction instanceof ReadWriteInstruction) {
        final ReadWriteInstruction readWriteInstruction = (ReadWriteInstruction)instruction;
        final ReadWriteInstruction.ACCESS access = readWriteInstruction.getAccess();
        if (!access.isReadAccess()) {
          continue;
        }
        final String name = readWriteInstruction.getName();
        if (name == null) {
          continue;
        }
        final PsiElement element = instruction.getElement();
        // Ignore elements out of scope
        if (element == null || !PsiTreeUtil.isAncestor(owner, element, false)) {
          continue;
        }
        final int startInstruction;
        if (access.isWriteAccess()) {
          final PyAugAssignmentStatement augAssignmentStatement = PyAugAssignmentStatementNavigator.getStatementByTarget(element);
          startInstruction = ControlFlowUtil.findInstructionNumberByElement(instructions, augAssignmentStatement);
        }
        else {
          startInstruction = i;
        }
        analyzeReadsInScope(name, owner, instructions, startInstruction, as(element, PyReferenceExpression.class));
      }
    }
  }

  // Walks the CFG backwards from startInstruction and marks every write to
  // `name` (and same-named function definition) that reaches it as used.
  private void analyzeReadsInScope(@NotNull String name,
                                   @NotNull ScopeOwner owner,
                                   @NotNull Instruction[] instructions,
                                   int startInstruction,
                                   @Nullable PsiElement scopeAnchor) {
    // Check if the element is declared out of scope, mark all out of scope write accesses as used
    if (scopeAnchor != null) {
      final ScopeOwner declOwner = ScopeUtil.getDeclarationScopeOwner(scopeAnchor, name);
      if (declOwner != null && declOwner != owner) {
        final Collection<PsiElement> writeElements = ScopeUtil.getReadWriteElements(name, declOwner, false, true);
        for (PsiElement e : writeElements) {
          myUsedElements.add(e);
          myUnusedElements.remove(e);
        }
      }
    }
    ControlFlowUtil.iteratePrev(startInstruction, instructions, inst -> {
      final PsiElement instElement = inst.getElement();
      // Mark function as used
      if (instElement instanceof PyFunction) {
        if (name.equals(((PyFunction)instElement).getName())) {
          myUsedElements.add(instElement);
          myUnusedElements.remove(instElement);
          return ControlFlowUtil.Operation.CONTINUE;
        }
      }
      // Mark write access as used
      else if (inst instanceof ReadWriteInstruction) {
        final ReadWriteInstruction rwInstruction = (ReadWriteInstruction)inst;
        if (rwInstruction.getAccess().isWriteAccess() && name.equals(rwInstruction.getName())) {
          // For elements in scope
          if (instElement != null && PsiTreeUtil.isAncestor(owner, instElement, false)) {
            myUsedElements.add(instElement);
            myUnusedElements.remove(instElement);
          }
          return ControlFlowUtil.Operation.CONTINUE;
        }
      }
      return ControlFlowUtil.Operation.NEXT;
    });
  }

  // Thrown from the visitor below to abort traversal once locals() is found.
  static class DontPerformException extends RuntimeException {}

  // True if the scope calls locals(): any local may then be read dynamically,
  // so the unused-write analysis is skipped for it.
  private static boolean callsLocals(final ScopeOwner owner) {
    try {
      owner.acceptChildren(new PyRecursiveElementVisitor() {
        @Override
        public void visitPyCallExpression(final PyCallExpression node) {
          final PyExpression callee = node.getCallee();
          if (callee != null && "locals".equals(callee.getName())) {
            throw new DontPerformException();
          }
          node.acceptChildren(this); // look at call expr in arguments
        }

        @Override
        public void visitPyFunction(final PyFunction node) {
          // stop here
        }
      });
    }
    catch (DontPerformException e) {
      return true;
    }
    return false;
  }

  // Final pass: report every element still in myUnusedElements, applying the
  // user-configured filters and attaching the appropriate quick fixes.
  void registerProblems() {
    final List<PyInspectionExtension> filters = PyInspectionExtension.EP_NAME.getExtensionList();
    // Register problems
    final Set<PyFunction> functionsWithInheritors = new HashSet<>();
    final Map<PyFunction, Boolean> emptyFunctions = new HashMap<>();
    for (PsiElement element : myUnusedElements) {
      boolean ignoreUnused = false;
      for (PyInspectionExtension filter : filters) {
        if (filter.ignoreUnused(element, myTypeEvalContext)) {
          ignoreUnused = true;
        }
      }
      if (ignoreUnused) continue;
      if (element instanceof PyFunction) {
        // Local function
        final PsiElement nameIdentifier = ((PyFunction)element).getNameIdentifier();
        registerWarning(nameIdentifier == null ? element : nameIdentifier,
                        PyBundle.message("INSP.unused.locals.local.function.isnot.used", ((PyFunction)element).getName()),
                        new PyRemoveStatementQuickFix());
      }
      else if (element instanceof PyClass) {
        // Local class
        final PyClass cls = (PyClass)element;
        final PsiElement name = cls.getNameIdentifier();
        registerWarning(name != null ? name : element,
                        PyBundle.message("INSP.unused.locals.local.class.isnot.used", cls.getName()),
                        new PyRemoveStatementQuickFix());
      }
      else {
        // Local variable or parameter
        String name = element.getText();
        if (element instanceof PyNamedParameter || element.getParent() instanceof PyNamedParameter) {
          PyNamedParameter namedParameter = element instanceof PyNamedParameter
                                            ? (PyNamedParameter)element
                                            : (PyNamedParameter)element.getParent();
          name = namedParameter.getName();
          // When function is inside a class, first parameter may be either self or cls which is always 'used'.
          if (namedParameter.isSelf()) {
            continue;
          }
          if (myIgnoreLambdaParameters && PsiTreeUtil.getParentOfType(element, PyCallable.class) instanceof PyLambdaExpression) {
            continue;
          }
          boolean mayBeField = false;
          PyClass containingClass = null;
          PyParameterList paramList = PsiTreeUtil.getParentOfType(element, PyParameterList.class);
          if (paramList != null && paramList.getParent() instanceof PyFunction) {
            final PyFunction func = (PyFunction)paramList.getParent();
            containingClass = func.getContainingClass();
            // An unused __init__ parameter may be intended as a field — offer AddFieldQuickFix.
            if (PyNames.INIT.equals(func.getName()) && containingClass != null &&
                !namedParameter.isKeywordContainer() && !namedParameter.isPositionalContainer()) {
              mayBeField = true;
            }
            else if (ignoreUnusedParameters(func, functionsWithInheritors)) {
              continue;
            }
            if (func.asMethod() != null) {
              // Empty method bodies (e.g. abstract stubs) legitimately ignore their parameters.
              Boolean isEmpty = emptyFunctions.get(func);
              if (isEmpty == null) {
                isEmpty = PyUtil.isEmptyFunction(func);
                emptyFunctions.put(func, isEmpty);
              }
              if (isEmpty && !mayBeField) {
                continue;
              }
            }
          }
          // A keyword-only parameter right after a bare '*' can only be removed
          // if it isn't the last one (otherwise the '*' would be left dangling).
          boolean canRemove = !(PsiTreeUtil.getPrevSiblingOfType(element, PyParameter.class) instanceof PySingleStarParameter) ||
                              PsiTreeUtil.getNextSiblingOfType(element, PyParameter.class) != null;
          final List<LocalQuickFix> fixes = new ArrayList<>();
          if (mayBeField) {
            fixes.add(new AddFieldQuickFix(name, name, containingClass.getName(), false));
          }
          if (canRemove) {
            fixes.add(new PyRemoveParameterQuickFix());
          }
          registerWarning(element, PyBundle.message("INSP.unused.locals.parameter.isnot.used", name), fixes.toArray(LocalQuickFix.EMPTY_ARRAY));
        }
        else {
          if (myIgnoreTupleUnpacking && isTupleUnpacking(element)) {
            continue;
          }
          final PyForStatement forStatement = PyForStatementNavigator.getPyForStatementByIterable(element);
          if (forStatement != null) {
            if (!myIgnoreRangeIterationVariables || !isRangeIteration(forStatement)) {
              registerProblem(element, PyBundle.message("INSP.unused.locals.local.variable.isnot.used", name),
                              ProblemHighlightType.LIKE_UNUSED_SYMBOL, null, new ReplaceWithWildCard());
            }
          }
          else if (!myIgnoreVariablesStartingWithUnderscore || !name.startsWith(PyNames.UNDERSCORE)) {
            registerWarning(element, PyBundle.message("INSP.unused.locals.local.variable.isnot.used", name), new PyRemoveStatementQuickFix());
          }
        }
      }
    }
  }

  // True when the for-loop iterates over the builtin range()/xrange().
  private boolean isRangeIteration(@NotNull PyForStatement forStatement) {
    final PyExpression source = forStatement.getForPart().getSource();
    if (!(source instanceof PyCallExpression)) {
      return false;
    }
    final PyCallExpression expr = (PyCallExpression)source;
    if (expr.isCalleeText("range", "xrange")) {
      final PyResolveContext resolveContext = PyResolveContext.noImplicits().withTypeEvalContext(myTypeEvalContext);
      final PyBuiltinCache builtinCache = PyBuiltinCache.getInstance(forStatement);
      return ContainerUtil.exists(expr.multiResolveCalleeFunction(resolveContext), builtinCache::isBuiltin);
    }
    return false;
  }

  // True when the function overrides or is overridden by another method, so
  // its parameters are constrained by the signature elsewhere. Results are
  // memoized in functionsWithInheritors (searches are expensive).
  private boolean ignoreUnusedParameters(PyFunction func, Set<PyFunction> functionsWithInheritors) {
    if (functionsWithInheritors.contains(func)) {
      return true;
    }
    if (!PyNames.INIT.equals(func.getName()) &&
        PySuperMethodsSearch.search(func, myTypeEvalContext).findFirst() != null ||
        PyOverridingMethodsSearch.search(func, true).findFirst() != null) {
      functionsWithInheritors.add(func);
      return true;
    }
    return false;
  }

  private boolean isTupleUnpacking(PsiElement element) {
    if (!(element instanceof PyTargetExpression)) {
      return false;
    }
    // Handling of the star expressions
    PsiElement parent = element.getParent();
    if (parent instanceof PyStarExpression) {
      element = parent;
      parent = element.getParent();
    }
    if (parent instanceof PyTupleExpression) {
      // if all the items of the tuple are unused, we still highlight all of them; if some are unused, we ignore
      final PyTupleExpression tuple = (PyTupleExpression)parent;
      for (PyExpression expression : tuple.getElements()) {
        if (expression instanceof PyStarExpression) {
          if (!myUnusedElements.contains(((PyStarExpression)expression).getExpression())) {
            return true;
          }
        }
        else if (!myUnusedElements.contains(expression)) {
          return true;
        }
      }
    }
    return false;
  }

  // Shared helper: report with the LIKE_UNUSED_SYMBOL highlighting.
  private void registerWarning(@NotNull final PsiElement element, final String msg, LocalQuickFix... quickfixes) {
    registerProblem(element, msg, ProblemHighlightType.LIKE_UNUSED_SYMBOL, null, quickfixes);
  }

  // Quick fix replacing an unused for-loop target with the wildcard '_'.
  private static class ReplaceWithWildCard implements LocalQuickFix {
    @Override
    @NotNull
    public String getFamilyName() {
      return PyBundle.message("INSP.unused.locals.replace.with.wildcard");
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      PsiElement psiElement = descriptor.getPsiElement();
      // Generate a dummy 'for _ in tuples: pass' and graft its target over the unused one.
      final PyFile pyFile = (PyFile)PyElementGenerator.getInstance(psiElement.getProject())
        .createDummyFile(LanguageLevel.getDefault(), "for _ in tuples:\n  pass");
      final PyExpression target = ((PyForStatement)pyFile.getStatements().get(0)).getForPart().getTarget();
      if (target != null) {
        psiElement.replace(target);
      }
    }
  }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2018 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.accessinput; import java.math.BigDecimal; import java.util.List; import java.util.Map; import org.apache.commons.vfs2.FileName; import org.apache.commons.vfs2.FileObject; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettlePluginException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.fileinput.FileInputList; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaAndData; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaBoolean; import org.pentaho.di.core.row.value.ValueMetaDate; import org.pentaho.di.core.row.value.ValueMetaFactory; import org.pentaho.di.core.row.value.ValueMetaInteger; 
import org.pentaho.di.core.row.value.ValueMetaNone; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.resource.ResourceDefinition; import org.pentaho.di.resource.ResourceNamingInterface; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInjectionInterface; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; import com.healthmarketscience.jackcess.Column; import com.healthmarketscience.jackcess.DataType; @Step( id = "AccessInput", name = "BaseStep.TypeLongDesc.AccessInput", description = "BaseStep.TypeTooltipDesc.AccessInput", categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.Input", image = "images/ACI.svg", documentationUrl = "http://wiki.pentaho.com/display/EAI/Access+Input", i18nPackageName = "org.pentaho.di.trans.steps.accessinput" ) public class AccessInputMeta extends BaseStepMeta implements StepMetaInterface { private static Class<?> PKG = AccessInputMeta.class; // for i18n purposes, needed by Translator2!! 
public static final String[] RequiredFilesDesc = new String[] { BaseMessages.getString( PKG, "System.Combo.No" ), BaseMessages.getString( PKG, "System.Combo.Yes" ) }; public static final String[] RequiredFilesCode = new String[] { "N", "Y" }; /** Array of filenames */ private String[] fileName; /** Wildcard or filemask (regular expression) */ private String[] fileMask; /** Wildcard or filemask to exclude (regular expression) */ private String[] excludeFileMask; /** Array of boolean values as string, indicating if a file is required. */ private String[] fileRequired; /** Array of boolean values as string, indicating if we need to fetch sub folders. */ private String[] includeSubFolders; /** Flag indicating that we should include the filename in the output */ private boolean includeFilename; /** Flag indicating that we should include the tablename in the output */ private boolean includeTablename; /** Flag indicating that we should reset RowNum for each file */ private boolean resetRowNumber; /** The name of the field in the output containing the table name */ private String tablenameField; /** The name of the field in the output containing the filename */ private String filenameField; /** Flag indicating that a row number field should be included in the output */ private boolean includeRowNumber; /** The name of the field in the output containing the row number */ private String rowNumberField; /** The name of the table of the database */ private String TableName; /** The maximum number or lines to read */ private long rowLimit; /** The fields to import... 
*/ private AccessInputField[] inputFields; /** file name from previous fields **/ private boolean filefield; private boolean isaddresult; private String dynamicFilenameField; private static final String YES = "Y"; public static final String[] type_trim_code = { "none", "left", "right", "both" }; /** Prefix that flags system tables */ public static final String PREFIX_SYSTEM = "MSys"; /** Additional fields **/ private String shortFileFieldName; private String pathFieldName; private String hiddenFieldName; private String lastModificationTimeFieldName; private String uriNameFieldName; private String rootUriNameFieldName; private String extensionFieldName; private String sizeFieldName; public AccessInputMeta() { super(); // allocate BaseStepMeta } /** * @return Returns the extensionFieldName. */ public String getExtensionField() { return extensionFieldName; } /** * @param field * The extensionFieldName to set. */ public void setExtensionField( String field ) { extensionFieldName = field; } /** * @return Returns the sizeFieldName. */ public String getSizeField() { return sizeFieldName; } /** * @param field * The sizeFieldName to set. */ public void setSizeField( String field ) { sizeFieldName = field; } /** * @return Returns the shortFileFieldName. */ public String getShortFileNameField() { return shortFileFieldName; } /** * @param field * The shortFileFieldName to set. */ public void setShortFileNameField( String field ) { shortFileFieldName = field; } /** * @return Returns the pathFieldName. */ public String getPathField() { return pathFieldName; } /** * @param field * The pathFieldName to set. */ public void setPathField( String field ) { this.pathFieldName = field; } /** * @return Returns the hiddenFieldName. */ public String isHiddenField() { return hiddenFieldName; } /** * @param field * The hiddenFieldName to set. */ @Deprecated public void setIsHiddenField( String field ) { setHiddenField( field ); } /** * @param field * The hiddenFieldName to set. 
*/ public void setHiddenField( String field ) { hiddenFieldName = field; } /** * @return Returns the lastModificationTimeFieldName. */ public String getLastModificationDateField() { return lastModificationTimeFieldName; } /** * @param field * The lastModificationTimeFieldName to set. */ public void setLastModificationDateField( String field ) { lastModificationTimeFieldName = field; } /** * @return Returns the uriNameFieldName. */ public String getUriField() { return uriNameFieldName; } /** * @param field * The uriNameFieldName to set. */ public void setUriField( String field ) { uriNameFieldName = field; } /** * @return Returns the uriNameFieldName. */ public String getRootUriField() { return rootUriNameFieldName; } /** * @param field * The rootUriNameFieldName to set. */ public void setRootUriField( String field ) { rootUriNameFieldName = field; } /** * @return Returns the input fields. */ public AccessInputField[] getInputFields() { return inputFields; } /** * @param inputFields * The input fields to set. */ public void setInputFields( AccessInputField[] inputFields ) { this.inputFields = inputFields; } /** * @return Returns the excludeFileMask. */ public String[] getExludeFileMask() { return getExcludeFileMask(); } /** * @return Returns the excludeFileMask. */ public String[] getExcludeFileMask() { return excludeFileMask; } /** * @param excludeFileMask * The excludeFileMask to set. */ public void setExcludeFileMask( String[] excludeFileMask ) { this.excludeFileMask = excludeFileMask; } /** * @return Returns the fileMask. */ public String[] getFileMask() { return fileMask; } public String[] getFileRequired() { return fileRequired; } public String getRequiredFilesDesc( String tt ) { if ( tt == null ) { return RequiredFilesDesc[0]; } if ( tt.equals( RequiredFilesCode[1] ) ) { return RequiredFilesDesc[1]; } else { return RequiredFilesDesc[0]; } } /** * @param fileMask * The fileMask to set. 
*/ public void setFileMask( String[] fileMask ) { this.fileMask = fileMask; } public void setFileRequired( String[] fileRequiredin ) { if ( fileRequiredin == null ) { this.fileRequired = new String[0]; return; } this.fileRequired = new String[fileRequiredin.length]; for ( int i = 0; i < fileRequiredin.length; i++ ) { this.fileRequired[i] = getRequiredFilesCode( fileRequiredin[i] ); } } public String getRequiredFilesCode( String tt ) { if ( tt == null ) { return RequiredFilesCode[0]; } if ( tt.equals( RequiredFilesDesc[1] ) ) { return RequiredFilesCode[1]; } else { return RequiredFilesCode[0]; } } public String[] getIncludeSubFolders() { return includeSubFolders; } public void setIncludeSubFolders( String[] includeSubFoldersin ) { if ( includeSubFoldersin == null ) { this.includeSubFolders = new String[0]; return; } this.includeSubFolders = new String[includeSubFoldersin.length]; for ( int i = 0; i < includeSubFoldersin.length; i++ ) { this.includeSubFolders[i] = getRequiredFilesCode( includeSubFoldersin[i] ); } } /** * @return Returns the fileName. */ public String[] getFileName() { return fileName; } /** * @param fileName * The fileName to set. */ public void setFileName( String[] fileName ) { this.fileName = fileName; } /** * @return Returns the filenameField. */ public String getFilenameField() { return filenameField; } /** * @return Returns the dynamic filename field (from previous steps) */ public String getDynamicFilenameField() { return dynamicFilenameField; } /** * @param dynamicFilenameField * The dynamic filename field to set. */ public void setDynamicFilenameField( String dynamicFilenameField ) { this.dynamicFilenameField = dynamicFilenameField; } /** * @param filenameField * The filenameField to set. */ public void setFilenameField( String filenameField ) { this.filenameField = filenameField; } /** * @return Returns the includeFilename. 
*/ @Deprecated public boolean includeFilename() { return isIncludeFilename(); } /** * @return Returns the includeFilename. */ public boolean isIncludeFilename() { return includeFilename; } /** * @return Returns the includeTablename. */ @Deprecated public boolean includeTablename() { return isIncludeTablename(); } /** * @return Returns the includeTablename. */ public boolean isIncludeTablename() { return includeTablename; } /** * @param includeFilename * The includeFilename to set. */ public void setIncludeFilename( boolean includeFilename ) { this.includeFilename = includeFilename; } /** * @param includeTablename * The includeTablename to set. */ public void setIncludeTablename( boolean includeTablename ) { this.includeTablename = includeTablename; } /** * @return Returns the includeRowNumber. */ @Deprecated public boolean includeRowNumber() { return isIncludeRowNumber(); } /** * @return Returns the includeRowNumber. */ public boolean isIncludeRowNumber() { return includeRowNumber; } /** * @return Returns the File field. */ public boolean isFileField() { return filefield; } /** * @param filefield * The filefield to set. */ public void setFileField( boolean filefield ) { this.filefield = filefield; } /** * @return Returns the resetRowNumber. */ @Deprecated public boolean resetRowNumber() { return isResetRowNumber(); } /** * @return Returns the resetRowNumber. */ public boolean isResetRowNumber() { return resetRowNumber; } /** * @param includeRowNumber * The includeRowNumber to set. */ public void setIncludeRowNumber( boolean includeRowNumber ) { this.includeRowNumber = includeRowNumber; } /** * @param isaddresult * The isaddresult to set. */ public void setAddResultFile( boolean isaddresult ) { this.isaddresult = isaddresult; } /** * @return Returns isaddresult. */ public boolean isAddResultFile() { return isaddresult; } /** * @param resetRowNumber * The resetRowNumber to set. 
*/ public void setResetRowNumber( boolean resetRowNumber ) { this.resetRowNumber = resetRowNumber; } /** * @return Returns the rowLimit. */ public long getRowLimit() { return rowLimit; } /** * @param rowLimit * The rowLimit to set. */ public void setRowLimit( long rowLimit ) { this.rowLimit = rowLimit; } /** * @return Returns the rowNumberField. */ public String getRowNumberField() { return rowNumberField; } /** * @return Returns the tablenameField. */ @Deprecated public String gettablenameField() { return getTablenameField(); } /** * @return Returns the tablenameField. */ public String getTablenameField() { return tablenameField; } /** * @return Returns the TableName. */ public String getTableName() { return TableName; } /** * @param rowNumberField * The rowNumberField to set. */ public void setRowNumberField( String rowNumberField ) { this.rowNumberField = rowNumberField; } /** * @param tablenameField * The tablenameField to set. */ public void setTablenameField( String tablenameField ) { this.tablenameField = tablenameField; } /** * @param TableName * The table name to set. 
*/
public void setTableName( String TableName ) {
  this.TableName = TableName;
}

@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  // All XML parsing is delegated to readData().
  readData( stepnode );
}

// Deep copy: primitive/String fields come from super.clone(); the arrays are re-allocated and
// copied so the clone does not share mutable state with this instance.
@Override
public Object clone() {
  AccessInputMeta retval = (AccessInputMeta) super.clone();
  int nrFiles = fileName.length;
  int nrFields = inputFields.length;
  retval.allocate( nrFiles, nrFields );
  System.arraycopy( fileName, 0, retval.fileName, 0, nrFiles );
  System.arraycopy( fileMask, 0, retval.fileMask, 0, nrFiles );
  System.arraycopy( excludeFileMask, 0, retval.excludeFileMask, 0, nrFiles );
  System.arraycopy( fileRequired, 0, retval.fileRequired, 0, nrFiles );
  System.arraycopy( includeSubFolders, 0, retval.includeSubFolders, 0, nrFiles );
  for ( int i = 0; i < nrFields; i++ ) {
    if ( inputFields[i] != null ) {
      retval.inputFields[i] = (AccessInputField) inputFields[i].clone();
    }
  }
  return retval;
}

// Serializes this step's settings to the transformation XML. Tag names here are a persistence
// contract and must stay in sync with readData() below.
@Override
public String getXML() {
  StringBuilder retval = new StringBuilder( 500 );
  retval.append( " " ).append( XMLHandler.addTagValue( "include", includeFilename ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "include_field", filenameField ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "tablename", includeTablename ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "filename_Field", dynamicFilenameField ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "tablename_field", tablenameField ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "rownum", includeRowNumber ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "isaddresult", isaddresult ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "filefield", filefield ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "rownum_field", rowNumberField ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "resetrownumber", resetRowNumber ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "table_name", TableName ) );
  // One <file> element holding parallel per-file tag lists (name/filemask/... are index-aligned).
  retval.append( " <file>" ).append( Const.CR );
  for ( int i = 0; i < fileName.length; i++ ) {
    retval.append( " " ).append( XMLHandler.addTagValue( "name", fileName[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "filemask", fileMask[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", excludeFileMask[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "file_required", fileRequired[i] ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includeSubFolders[i] ) );
  }
  retval.append( " </file>" ).append( Const.CR );
  /*
   * Describe the fields to read
   */
  retval.append( " <fields>" ).append( Const.CR );
  for ( int i = 0; i < inputFields.length; i++ ) {
    retval.append( " <field>" ).append( Const.CR );
    retval.append( " " ).append( XMLHandler.addTagValue( "name", inputFields[i].getName() ) );
    // "attribut" (sic) is the historical tag name for the source column; keep as-is for compat.
    retval.append( " " ).append( XMLHandler.addTagValue( "attribut", inputFields[i].getColumn() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "type", inputFields[i].getTypeDesc() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "format", inputFields[i].getFormat() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "length", inputFields[i].getLength() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "precision", inputFields[i].getPrecision() ) );
    retval
      .append( " " ).append( XMLHandler.addTagValue( "currency", inputFields[i].getCurrencySymbol() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "decimal", inputFields[i].getDecimalSymbol() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "group", inputFields[i].getGroupSymbol() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "trim_type", inputFields[i].getTrimTypeCode() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "repeat", inputFields[i].isRepeated() ) );
    retval.append( " </field>" ).append( Const.CR );
  }
  retval.append( " </fields>" ).append( Const.CR );
  retval.append( " " ).append( XMLHandler.addTagValue( "limit", rowLimit ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "shortFileFieldName", shortFileFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "pathFieldName", pathFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "hiddenFieldName", hiddenFieldName ) );
  retval.append( " " ).append(
    XMLHandler.addTagValue( "lastModificationTimeFieldName", lastModificationTimeFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "uriNameFieldName", uriNameFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "rootUriNameFieldName", rootUriNameFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "extensionFieldName", extensionFieldName ) );
  retval.append( " " ).append( XMLHandler.addTagValue( "sizeFieldName", sizeFieldName ) );
  return retval.toString();
}

// Inverse of getXML(): reads this step's settings back out of the transformation XML node.
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    includeFilename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "include" ) );
    filenameField = XMLHandler.getTagValue( stepnode, "include_field" );
    includeTablename = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "tablename" ) );
    tablenameField = XMLHandler.getTagValue( stepnode, "tablename_field" );
    includeRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "rownum" ) );
    // Missing "isaddresult" tag (older transformations) defaults to true.
    String addresult = XMLHandler.getTagValue( stepnode, "isaddresult" );
    if ( Utils.isEmpty( addresult ) ) {
      isaddresult = true;
    } else {
      isaddresult = "Y".equalsIgnoreCase( addresult );
    }
    filefield = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "filefield" ) );
    rowNumberField = XMLHandler.getTagValue( stepnode, "rownum_field" );
    TableName = XMLHandler.getTagValue( stepnode, "table_name" );
    dynamicFilenameField = XMLHandler.getTagValue( stepnode, "filename_Field" );
    resetRowNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "resetrownumber" ) );
    Node filenode = XMLHandler.getSubNode( stepnode, "file" );
    Node fields = XMLHandler.getSubNode( stepnode, "fields" );
    int nrFiles = XMLHandler.countNodes( filenode, "name" );
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFiles, nrFields );
    for ( int i = 0; i < nrFiles; i++ ) {
      Node filenamenode = XMLHandler.getSubNodeByNr( filenode, "name", i );
      Node filemasknode = XMLHandler.getSubNodeByNr( filenode, "filemask", i );
      Node excludefilemasknode = XMLHandler.getSubNodeByNr( filenode, "exclude_filemask", i );
      Node fileRequirednode = XMLHandler.getSubNodeByNr( filenode, "file_required", i );
      Node includeSubFoldersnode = XMLHandler.getSubNodeByNr( filenode, "include_subfolders", i );
      fileName[i] = XMLHandler.getNodeValue( filenamenode );
      fileMask[i] = XMLHandler.getNodeValue( filemasknode );
      excludeFileMask[i] = XMLHandler.getNodeValue( excludefilemasknode );
      fileRequired[i] = XMLHandler.getNodeValue( fileRequirednode );
      includeSubFolders[i] = XMLHandler.getNodeValue( includeSubFoldersnode );
    }
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      inputFields[i] = new AccessInputField();
      inputFields[i].setName( XMLHandler.getTagValue( fnode, "name" ) );
      inputFields[i].setColumn( XMLHandler.getTagValue( fnode, "attribut" ) );
      inputFields[i].setType( ValueMetaFactory.getIdForValueMeta( XMLHandler.getTagValue( fnode, "type" ) ) );
      inputFields[i].setLength( Const.toInt( XMLHandler.getTagValue( fnode, "length" ), -1 ) );
      inputFields[i].setPrecision( Const.toInt( XMLHandler.getTagValue( fnode, "precision" ), -1 ) );
      String srepeat = XMLHandler.getTagValue( fnode, "repeat" );
      inputFields[i].setTrimType( getTrimTypeByCode( XMLHandler.getTagValue( fnode, "trim_type" ) ) );
      // Missing "repeat" tag defaults to false.
      if ( srepeat != null ) {
        inputFields[i].setRepeated( YES.equalsIgnoreCase( srepeat ) );
      } else {
        inputFields[i].setRepeated( false );
      }
      inputFields[i].setFormat( XMLHandler.getTagValue( fnode, "format" ) );
      inputFields[i].setCurrencySymbol( XMLHandler.getTagValue( fnode, "currency" ) );
      inputFields[i].setDecimalSymbol( XMLHandler.getTagValue( fnode, "decimal" ) );
      inputFields[i].setGroupSymbol( XMLHandler.getTagValue( fnode, "group" ) );
    }

    // Is there a limit on the number of rows we process?
    rowLimit = Const.toLong( XMLHandler.getTagValue( stepnode, "limit" ), 0L );
    shortFileFieldName = XMLHandler.getTagValue( stepnode, "shortFileFieldName" );
    pathFieldName = XMLHandler.getTagValue( stepnode, "pathFieldName" );
    hiddenFieldName = XMLHandler.getTagValue( stepnode, "hiddenFieldName" );
    lastModificationTimeFieldName = XMLHandler.getTagValue( stepnode, "lastModificationTimeFieldName" );
    uriNameFieldName = XMLHandler.getTagValue( stepnode, "uriNameFieldName" );
    rootUriNameFieldName = XMLHandler.getTagValue( stepnode, "rootUriNameFieldName" );
    extensionFieldName = XMLHandler.getTagValue( stepnode, "extensionFieldName" );
    sizeFieldName = XMLHandler.getTagValue( stepnode, "sizeFieldName" );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}

// Allocates both the per-file arrays and the field array.
public void allocate( int nrfiles, int nrfields ) {
  allocateFiles( nrfiles );
  allocateFields( nrfields );
}

// Allocates the five parallel per-file arrays.
public void allocateFiles( int nrfiles ) {
  fileName = new String[nrfiles];
  fileMask = new String[nrfiles];
  excludeFileMask = new String[nrfiles];
  fileRequired = new String[nrfiles];
  includeSubFolders = new String[nrfiles];
}

public void allocateFields( int nrfields ) {
  inputFields = new AccessInputField[nrfields];
}

@Override
public void setDefault() {
  shortFileFieldName = null;
  pathFieldName = null;
  hiddenFieldName = null;
  lastModificationTimeFieldName = null;
  uriNameFieldName = null;
  rootUriNameFieldName = null;
  extensionFieldName = null;
  sizeFieldName = null;
  isaddresult = true;
  filefield = false;
  includeFilename = false;
  filenameField = "";
  includeTablename = false;
  tablenameField = "";
  includeRowNumber = false;
  rowNumberField = "";
  TableName = "";
  dynamicFilenameField = "";
  // Both counts are 0, so the loops below never run; they only matter if the defaults change.
  int nrFiles = 0;
  int nrFields = 0;
  allocate( nrFiles, nrFields );
  for ( int i = 0; i < nrFiles; i++ ) {
    fileName[i] =
      "filename" + ( i + 1 );
    fileMask[i] = "";
    excludeFileMask[i] = "";
    fileRequired[i] = RequiredFilesCode[0];
    includeSubFolders[i] = RequiredFilesCode[0];
  }
  for ( int i = 0; i < nrFields; i++ ) {
    inputFields[i] = new AccessInputField( "field" + ( i + 1 ) );
  }
  rowLimit = 0;
}

// Declares the output row layout: one value meta per configured field, then the optional
// filename/tablename/rownum fields, then the optional "additional" file-info fields.
@Override
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  int i;
  int nr = inputFields == null ? 0 : inputFields.length;
  for ( i = 0; i < nr; i++ ) {
    AccessInputField field = inputFields[i];
    int type = field.getType();
    // Fields without an explicit type default to String.
    if ( type == ValueMetaInterface.TYPE_NONE ) {
      type = ValueMetaInterface.TYPE_STRING;
    }
    ValueMetaInterface v;
    try {
      v = ValueMetaFactory.createValueMeta( space.environmentSubstitute( field.getName() ), type );
    } catch ( KettlePluginException e ) {
      // Fall back to a typeless value meta if the type id has no registered plugin.
      v = new ValueMetaNone( space.environmentSubstitute( field.getName() ) );
    }
    v.setLength( field.getLength() );
    v.setPrecision( field.getPrecision() );
    v.setOrigin( name );
    v.setConversionMask( field.getFormat() );
    v.setDecimalSymbol( field.getDecimalSymbol() );
    v.setGroupingSymbol( field.getGroupSymbol() );
    v.setCurrencySymbol( field.getCurrencySymbol() );
    v.setTrimType( field.getTrimType() );
    r.addValueMeta( v );
  }
  if ( includeFilename ) {
    ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( filenameField ) );
    v.setLength( 250 );
    v.setPrecision( -1 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  if ( includeTablename ) {
    ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( tablenameField ) );
    v.setLength( 250 );
    v.setPrecision( -1 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  if ( includeRowNumber ) {
    ValueMetaInterface v = new ValueMetaInteger( space.environmentSubstitute( rowNumberField ) );
    v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  // Add additional fields
  if ( getShortFileNameField() != null && getShortFileNameField().length() > 0 ) {
    ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( getShortFileNameField() ) );
    v.setLength( 100, -1 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  if ( getExtensionField() != null && getExtensionField().length() > 0 ) {
    ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( getExtensionField() ) );
    v.setLength( 100, -1 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  if ( getPathField() != null && getPathField().length() > 0 ) {
    ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( getPathField() ) );
    v.setLength( 100, -1 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  if ( getSizeField() != null && getSizeField().length() > 0 ) {
    ValueMetaInterface v = new ValueMetaInteger( space.environmentSubstitute( getSizeField() ) );
    v.setOrigin( name );
    v.setLength( 9 );
    r.addValueMeta( v );
  }
  if ( isHiddenField() != null && isHiddenField().length() > 0 ) {
    ValueMetaInterface v = new ValueMetaBoolean( space.environmentSubstitute( isHiddenField() ) );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  if ( getLastModificationDateField() != null && getLastModificationDateField().length() > 0 ) {
    ValueMetaInterface v = new ValueMetaDate( space.environmentSubstitute( getLastModificationDateField() ) );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  if ( getUriField() != null && getUriField().length() > 0 ) {
    ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( getUriField() ) );
    v.setLength( 100, -1 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
  if ( getRootUriField() != null && getRootUriField().length() > 0 ) {
    ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( getRootUriField() ) );
    v.setLength( 100, -1 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
}

// Maps a trim-type code ("none"/"left"/"right"/"both") to its index in type_trim_code;
// unknown or null codes map to 0 ("none").
public static final int getTrimTypeByCode( String tt ) {
  if ( tt != null ) {
    for ( int i = 0; i < type_trim_code.length; i++ ) {
      if ( type_trim_code[i].equalsIgnoreCase( tt ) ) {
        return i;
      }
    }
  }
  return 0;
}

// Reads this step's settings from the repository. Attribute names are a persistence contract.
// NOTE(review): the repository uses "reset_rownumber" while the XML uses "resetrownumber" —
// long-standing asymmetry, keep both spellings as-is.
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    includeFilename = rep.getStepAttributeBoolean( id_step, "include" );
    filenameField = rep.getStepAttributeString( id_step, "include_field" );
    TableName = rep.getStepAttributeString( id_step, "table_name" );
    includeTablename = rep.getStepAttributeBoolean( id_step, "tablename" );
    dynamicFilenameField = rep.getStepAttributeString( id_step, "filename_Field" );
    tablenameField = rep.getStepAttributeString( id_step, "tablename_field" );
    includeRowNumber = rep.getStepAttributeBoolean( id_step, "rownum" );
    // Missing "isaddresult" attribute (older repositories) defaults to true.
    String addresult = rep.getStepAttributeString( id_step, "isaddresult" );
    if ( Utils.isEmpty( addresult ) ) {
      isaddresult = true;
    } else {
      isaddresult = rep.getStepAttributeBoolean( id_step, "isaddresult" );
    }
    filefield = rep.getStepAttributeBoolean( id_step, "filefield" );
    rowNumberField = rep.getStepAttributeString( id_step, "rownum_field" );
    resetRowNumber = rep.getStepAttributeBoolean( id_step, "reset_rownumber" );
    rowLimit = rep.getStepAttributeInteger( id_step, "limit" );
    int nrFiles = rep.countNrStepAttributes( id_step, "file_name" );
    int nrFields = rep.countNrStepAttributes( id_step, "field_name" );
    allocate( nrFiles, nrFields );
    for ( int i = 0; i < nrFiles; i++ ) {
      fileName[i] = rep.getStepAttributeString( id_step, i, "file_name" );
      fileMask[i] = rep.getStepAttributeString( id_step, i, "file_mask" );
      excludeFileMask[i] = rep.getStepAttributeString( id_step, i, "exclude_file_mask" );
      fileRequired[i] = rep.getStepAttributeString( id_step, i, "file_required" );
      // Normalize anything that is not "Y" to the "N" code.
      if ( !YES.equalsIgnoreCase( fileRequired[i] ) ) {
        fileRequired[i] = RequiredFilesCode[0];
      }
      includeSubFolders[i] = rep.getStepAttributeString( id_step, i, "include_subfolders" );
      if ( !YES.equalsIgnoreCase( includeSubFolders[i] ) ) {
        includeSubFolders[i] = RequiredFilesCode[0];
      }
    }
    for ( int i = 0; i < nrFields; i++ ) {
      AccessInputField field = new
        AccessInputField();
      field.setName( rep.getStepAttributeString( id_step, i, "field_name" ) );
      field.setColumn( rep.getStepAttributeString( id_step, i, "field_attribut" ) );
      field.setType( ValueMetaFactory.getIdForValueMeta( rep.getStepAttributeString( id_step, i, "field_type" ) ) );
      field.setFormat( rep.getStepAttributeString( id_step, i, "field_format" ) );
      field.setCurrencySymbol( rep.getStepAttributeString( id_step, i, "field_currency" ) );
      field.setDecimalSymbol( rep.getStepAttributeString( id_step, i, "field_decimal" ) );
      field.setGroupSymbol( rep.getStepAttributeString( id_step, i, "field_group" ) );
      field.setLength( (int) rep.getStepAttributeInteger( id_step, i, "field_length" ) );
      field.setPrecision( (int) rep.getStepAttributeInteger( id_step, i, "field_precision" ) );
      // Note: uses AccessInputField's own trim-type mapping, not this class's getTrimTypeByCode.
      field.setTrimType( AccessInputField.getTrimTypeByCode( rep.getStepAttributeString( id_step, i, "field_trim_type" ) ) );
      field.setRepeated( rep.getStepAttributeBoolean( id_step, i, "field_repeat" ) );
      inputFields[i] = field;
    }
    shortFileFieldName = rep.getStepAttributeString( id_step, "shortFileFieldName" );
    pathFieldName = rep.getStepAttributeString( id_step, "pathFieldName" );
    hiddenFieldName = rep.getStepAttributeString( id_step, "hiddenFieldName" );
    lastModificationTimeFieldName = rep.getStepAttributeString( id_step, "lastModificationTimeFieldName" );
    uriNameFieldName = rep.getStepAttributeString( id_step, "uriNameFieldName" );
    rootUriNameFieldName = rep.getStepAttributeString( id_step, "rootUriNameFieldName" );
    extensionFieldName = rep.getStepAttributeString( id_step, "extensionFieldName" );
    sizeFieldName = rep.getStepAttributeString( id_step, "sizeFieldName" );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "AccessInputMeta.Exception.ErrorReadingRepository" ), e );
  }
}

// Writes this step's settings to the repository; must mirror readRep() attribute names.
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    rep.saveStepAttribute( id_transformation, id_step, "include", includeFilename );
    rep.saveStepAttribute( id_transformation, id_step, "include_field", filenameField );
    rep.saveStepAttribute( id_transformation, id_step, "tablename", includeTablename );
    rep.saveStepAttribute( id_transformation, id_step, "tablename_field", tablenameField );
    rep.saveStepAttribute( id_transformation, id_step, "rownum", includeRowNumber );
    rep.saveStepAttribute( id_transformation, id_step, "isaddresult", isaddresult );
    rep.saveStepAttribute( id_transformation, id_step, "filefield", filefield );
    rep.saveStepAttribute( id_transformation, id_step, "filename_Field", dynamicFilenameField );
    rep.saveStepAttribute( id_transformation, id_step, "rownum_field", rowNumberField );
    rep.saveStepAttribute( id_transformation, id_step, "limit", rowLimit );
    rep.saveStepAttribute( id_transformation, id_step, "table_name", TableName );
    rep.saveStepAttribute( id_transformation, id_step, "reset_rownumber", resetRowNumber );
    for ( int i = 0; i < fileName.length; i++ ) {
      rep.saveStepAttribute( id_transformation, id_step, i, "file_name", fileName[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "file_mask", fileMask[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "exclude_file_mask", excludeFileMask[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "file_required", fileRequired[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "include_subfolders", includeSubFolders[i] );
    }
    for ( int i = 0; i < inputFields.length; i++ ) {
      AccessInputField field = inputFields[i];
      rep.saveStepAttribute( id_transformation, id_step, i, "field_name", field.getName() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_attribut", field.getColumn() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_type", field.getTypeDesc() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_format", field.getFormat() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_currency", field.getCurrencySymbol() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_decimal", field.getDecimalSymbol() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_group", field.getGroupSymbol() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_length", field.getLength() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_precision", field.getPrecision() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_trim_type", field.getTrimTypeCode() );
      rep.saveStepAttribute( id_transformation, id_step, i, "field_repeat", field.isRepeated() );
    }
    rep.saveStepAttribute( id_transformation, id_step, "shortFileFieldName", shortFileFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "pathFieldName", pathFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "hiddenFieldName", hiddenFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "lastModificationTimeFieldName", lastModificationTimeFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "uriNameFieldName", uriNameFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "rootUriNameFieldName", rootUriNameFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "extensionFieldName", extensionFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "sizeFieldName", sizeFieldName );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "AccessInputMeta.Exception.ErrorSavingToRepository", "" + id_step ), e );
  }
}

// Resolves the configured file names/masks into a concrete list of files.
public FileInputList getFiles( VariableSpace space ) {
  return FileInputList.createFileList(
    space, fileName, fileMask, excludeFileMask, fileRequired, includeSubFolderBoolean() );
}

// Converts the "N"/"Y" includeSubFolders codes into a boolean array, one per file entry.
private boolean[] includeSubFolderBoolean() {
  int len = fileName.length;
  boolean[] includeSubFolderBoolean = new boolean[len];
  for ( int i = 0; i < len; i++ ) {
    includeSubFolderBoolean[i] = YES.equalsIgnoreCase( includeSubFolders[i] );
  }
  return includeSubFolderBoolean;
}

@Override
public void check(
    List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
  String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
  IMetaStore metaStore ) {
  CheckResult cr;
  // See if we get input... this step reads files, so incoming streams are an error.
  if ( input.length > 0 ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(
      PKG, "AccessInputMeta.CheckResult.NoInputExpected" ), stepMeta );
    remarks.add( cr );
  } else {
    cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString(
      PKG, "AccessInputMeta.CheckResult.NoInput" ), stepMeta );
    remarks.add( cr );
  }
  FileInputList fileInputList = getFiles( transMeta );
  // String files[] = getFiles();
  if ( fileInputList == null || fileInputList.getFiles().size() == 0 ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(
      PKG, "AccessInputMeta.CheckResult.NoFiles" ), stepMeta );
    remarks.add( cr );
  } else {
    cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString(
      PKG, "AccessInputMeta.CheckResult.FilesOk", "" + fileInputList.getFiles().size() ), stepMeta );
    remarks.add( cr );
  }
  // Check table
  // NOTE(review): this branch reuses the file-count message keys ("NoFiles"/"FilesOk") for the
  // table-name check — looks like a copy-paste; confirm whether table-specific keys were intended.
  if ( Utils.isEmpty( getTableName() ) ) {
    cr = new CheckResult( CheckResult.TYPE_RESULT_ERROR, BaseMessages.getString(
      PKG, "AccessInputMeta.CheckResult.NoFiles" ), stepMeta );
    remarks.add( cr );
  } else {
    cr = new CheckResult( CheckResult.TYPE_RESULT_OK, BaseMessages.getString(
      PKG, "AccessInputMeta.CheckResult.FilesOk", "" + fileInputList.getFiles().size() ), stepMeta );
    remarks.add( cr );
  }
}

// Converts a VFS FileObject into a local filesystem path when the URI uses the "file:" scheme;
// other schemes are returned as their full URI. The substring offsets strip the "file://"
// prefix (and keep the drive letter on Windows-style "X:/" roots).
public static String getFilename( FileObject fileObject ) {
  FileName fileName = fileObject.getName();
  String root = fileName.getRootURI();
  if ( !root.startsWith( "file:" ) ) {
    return fileName.getURI(); // nothing we can do about non-normal files.
  }
  if ( root.endsWith( ":/" ) ) {
    root = root.substring( 8, 10 );
  } else {
    root = root.substring( 7, root.length() - 1 );
  }
  String fileString = root + fileName.getPath();
  if ( !"/".equals( Const.FILE_SEPARATOR ) ) {
    fileString = Const.replace( fileString, "/", Const.FILE_SEPARATOR );
  }
  return fileString;
}

@Override
public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
  Trans trans ) {
  return new AccessInput( stepMeta, stepDataInterface, cnr, tr, trans );
}

@Override
public StepDataInterface getStepData() {
  return new AccessInputData();
}

@Override
public boolean supportsErrorHandling() {
  return true;
}

/**
 * @param space
 *          the variable space to use
 * @param definitions
 * @param resourceNamingInterface
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // The object that we're modifying here is a copy of the original!
    // So let's change the filename from relative to absolute by grabbing the file object...
    //
    // Replace the filename ONLY (folder or filename)
    //
    for ( int i = 0; i < fileName.length; i++ ) {
      FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName[i] ), space );
      fileName[i] = resourceNamingInterface.nameResource( fileObject, space, Utils.isEmpty( fileMask[i] ) );
    }
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}

/**
 * Returns kettle type from Microsoft Access database
 *
 * @param c
 *          MS Access column
 * @return valuemeta, or null when no mapping could be made
 */
public static ValueMetaInterface getValueMeta( Column c ) {
  // get value
  ValueMetaAndData vmd = getValueMetaAndData( c, null, null );
  if ( vmd != null ) {
    // returns meta
    return vmd.getValueMeta();
  }
  return null;
}

/**
 * Returns kettle type from Microsoft Access database also convert data to prepare kettle value
 *
 * @param c
 *          MS Access column
 * @param name
 *          destination field name
 * @param data
 *          MS Access column value
 * @return valuemeta and data
 */
public static ValueMetaAndData getValueMetaAndData( Column c, String name, Object data ) {
  ValueMetaAndData valueMetaData = new ValueMetaAndData();
  // get data
  Object o = data;

  // Get column type
  DataType type = c.getType();

  int sourceValueType = ValueMetaInterface.TYPE_STRING;

  // Find corresponding Kettle type for each MS Access type
  // We have to take of Meta AND data
  switch ( type ) {
    case BINARY:
      sourceValueType = ValueMetaInterface.TYPE_BINARY;
      break;
    case BOOLEAN:
      sourceValueType = ValueMetaInterface.TYPE_BOOLEAN;
      if ( o != null ) {
        o = Boolean.valueOf( o.toString() );
      }
      break;
    case DOUBLE:
      sourceValueType = ValueMetaInterface.TYPE_NUMBER;
      break;
    case FLOAT:
      // Access FLOAT maps to BigNumber; build the BigDecimal from the float's String form.
      sourceValueType = ValueMetaInterface.TYPE_BIGNUMBER;
      if ( o != null ) {
        o = new BigDecimal( Float.toString( (Float) o ) );
      }
      break;
    case INT:
      sourceValueType = ValueMetaInterface.TYPE_NUMBER;
      if ( o != null ) {
        o = Double.parseDouble( o.toString() );
      }
      break;
    case BYTE:
      sourceValueType = ValueMetaInterface.TYPE_NUMBER;
      if ( o != null ) {
        o = Double.parseDouble( o.toString() );
      }
      break;
case LONG: sourceValueType = ValueMetaInterface.TYPE_INTEGER; if ( o != null ) { Integer i = (Integer) o; o = i.longValue(); } break; case MEMO: // Should be considered as String break; case MONEY: sourceValueType = ValueMetaInterface.TYPE_BIGNUMBER; break; case NUMERIC: sourceValueType = ValueMetaInterface.TYPE_BIGNUMBER; break; case SHORT_DATE_TIME: sourceValueType = ValueMetaInterface.TYPE_DATE; break; default: // Default it's string if ( o != null ) { o = o.toString(); } break; } ValueMetaInterface sourceValueMeta; try { sourceValueMeta = ValueMetaFactory.createValueMeta( name == null ? c.getName() : name, sourceValueType ); } catch ( KettlePluginException e ) { sourceValueMeta = new ValueMetaNone( name == null ? c.getName() : name ); } sourceValueMeta.setLength( c.getLength(), c.getPrecision() ); // set value meta data and return it valueMetaData.setValueMeta( sourceValueMeta ); if ( o != null ) { valueMetaData.setValueData( o ); } return valueMetaData; } @Override public StepMetaInjectionInterface getStepMetaInjectionInterface() { return new AccessInputMetaInjection( this ); } }
package lv.emes.libraries.communication.db;

import lv.emes.libraries.testdata.TestData;
import lv.emes.libraries.tools.lists.MS_List;
import org.junit.*;
import org.junit.runners.MethodSorters;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Map;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests for {@link MS_MySQLDatabase}.
 * <p>
 * Requires a reachable MySQL server at {@code TestData.TESTING_SERVER_HOSTAME}:3306 with a
 * database {@code test}, a user {@code test_user}/{@code test_user}, and a table
 * {@code tests(id int, name varchar, count int)}.
 * <p>
 * Tests run in name order ({@link MethodSorters#NAME_ASCENDING}); each test starts from a
 * freshly populated {@code tests} table (three rows, inserted in {@link #setUpForEachTest()})
 * and the table is emptied again in {@link #tearDownForEachTest()}.
 */
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class MS_MySQLDatabaseTest {

    /** Shared database handle, created once for the whole class. */
    private static MS_JDBCDatabase db;

    /** Connects to the test database once before any test runs and verifies it is online. */
    @BeforeClass
    public static void initTestPreConditions() {
        db = new MS_MySQLDatabase(new MS_DBParameters()
                .withHostname(TestData.TESTING_SERVER_HOSTAME)
                .withDbName("test")
                .withUserName("test_user")
                .withPassword("test_user")
                .withPort(3306)
        );
        db.initialize();
        assertThat(db.isOnline()).isTrue();
    }

    /** Closes the shared database connection after all tests have run. */
    @AfterClass
    public static void finalizeTestConditions() {
        db.disconnect();
    }

    /** Inserts the three fixture rows (1/test1/33, 2/test2/22, 3/test3/11) and commits. */
    @Before
    public void setUpForEachTest() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            // SQL text kept byte-identical to the original three statements.
            String[] inserts = {
                    "insert into tests(id, name, count) values (1, 'test1', 33)",
                    "insert into tests(id, name, count) values (2, 'test2', 22)",
                    "insert into tests(id, name, count) values (3, 'test3', 11)"
            };
            for (String sql : inserts) {
                con.executeQuery(con.prepareQuery(sql));
            }
            con.finishWork();
        }
    }

    /** Empties the {@code tests} table after each test. */
    @After
    public void tearDownForEachTest() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            con.executeQuery(con.prepareQuery("delete from tests"));
            // FIX: commit the delete the same way setUpForEachTest() commits its inserts.
            // Without finishWork() the cleanup may be rolled back when the session is not
            // in auto-commit mode, leaving stale rows for the next test.
            con.finishWork();
        }
    }

    /** Verifies a query can be executed directly from a SQL string, without statement preparation. */
    @Test
    public void test00GetQueryResultWithoutStmtPreparation() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            String query = "select * from tests";
            ResultSet rs = con.getQueryResult(query);
            assertThat(rs.next()).isTrue();
            assertThat(rs.getString("id")).isEqualTo("1");
            assertThat(rs.getInt("id")).isEqualTo(1);
            assertThat(rs.getString("name")).isEqualTo("test1");
            assertThat(rs.getInt("count")).isEqualTo(33);
        }
    }

    /** Reads cell values both by column name and by 1-based column index across all three rows. */
    @Test
    public void test01GetCellValuesByName() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            String query = "select * from tests";
            MS_PreparedSQLQuery st = con.prepareQuery(query);
            ResultSet rs = con.getQueryResult(st);

            assertThat(rs.next()).isTrue(); // first record
            assertThat(rs.getString("id")).isEqualTo("1");
            assertThat(rs.getInt("id")).isEqualTo(1);
            assertThat(rs.getString("name")).isEqualTo("test1");
            assertThat(rs.getInt("count")).isEqualTo(33);

            assertThat(rs.next()).isTrue(); // second record
            assertThat(rs.getInt("count")).isEqualTo(22);
            assertThat(rs.getString("name")).isEqualTo("test2");
            assertThat(rs.getInt("id")).isEqualTo(2);

            assertThat(rs.next()).isTrue(); // third record, accessed by column index
            assertThat(rs.getInt(3)).isEqualTo(11);
            assertThat(rs.getString(2)).isEqualTo("test3");
            assertThat(rs.getInt(1)).isEqualTo(3);
        }
    }

    /** Selects a single row by primary key and checks every column. */
    @Test
    public void test02GetSecondRecord() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            String query = "select * from tests where id=2";
            MS_PreparedSQLQuery st = con.prepareQuery(query);
            ResultSet rs = con.getQueryResult(st);
            assertThat(rs.next()).isTrue();
            assertThat(rs.getInt(1)).isEqualTo(2);
            assertThat(rs.getString(2)).isEqualTo("test2");
            // the numeric column is readable both as String and as int
            assertThat(rs.getString(3)).isEqualTo("22");
            assertThat(rs.getInt(3)).isEqualTo(22);
        }
    }

    /** Verifies parameter binding on a prepared query ({@code ?} placeholder). */
    @Test
    public void test03QueryWithParams() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            String query = "select * from tests where id=?";
            MS_PreparedSQLQuery st = con.prepareQuery(query);
            st.setInt(1, 3);
            ResultSet rs = con.getQueryResult(st);
            assertThat(rs.next()).isTrue();
            assertThat(rs.getString(1)).isEqualTo("3");
            assertThat(rs.getString(2)).isEqualTo("test3");
            assertThat(rs.getInt(3)).isEqualTo(11);
        }
    }

    /** Runs an UPDATE and then re-reads the row to confirm the change is visible. */
    @Test
    public void test04Editing() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            MS_PreparedSQLQuery st = con.prepareQuery("update tests set name='Oswald' where id=1");
            con.executeQuery(st);

            // now to look at the changes!
            st = con.prepareQuery("select * from tests where id=1");
            ResultSet rs = con.getQueryResult(st);
            assertThat(rs.next()).isTrue();
            assertThat(rs.getString(2)).isEqualTo("Oswald");
            assertThat(rs.getInt(3)).isEqualTo(33);
        }
    }

    /** Iterates the full result set and checks row count and per-row values. */
    @Test
    public void test05SelectAll() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            MS_PreparedSQLQuery st = con.prepareQuery("select * from tests");
            ResultSet rs = con.getQueryResult(st);
            int i = 0;
            while (rs.next()) {
                i++;
                assertThat(rs.getInt(1)).isEqualTo(i);
                assertThat(rs.getString(2)).isEqualTo("test" + i);
            }
            assertThat(i).isEqualTo(3);
        }
    }

    /** Extracts the result set into a list of row objects via MS_ResultSetExtractingUtils. */
    @Test
    public void test06TableRecordTest() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            MS_PreparedSQLQuery st = con.prepareQuery("select * from tests");
            ResultSet rs = con.getQueryResult(st);
            MS_List<Table_tests_Row> testsTable =
                    MS_ResultSetExtractingUtils.extractList(rs, Table_tests_Row.class);
            assertThat(testsTable.count()).isEqualTo(3);
            for (int i = 0; i < testsTable.count(); i++) {
                assertThat(testsTable.get(i).id).isEqualTo(i + 1);
                assertThat(testsTable.get(i).name).isEqualTo("test" + (i + 1));
            }
        }
    }

    /** Extracts the result set into a map keyed by the unique field (id). */
    @Test
    public void test07TableUniqueRecordTest() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            MS_PreparedSQLQuery st = con.prepareQuery("select * from tests");
            ResultSet rs = con.getQueryResult(st);
            Map<Integer, Table_tests_Row> testsTable =
                    MS_ResultSetExtractingUtils.extractMap(rs, Table_tests_Row.class);
            assertThat(testsTable.size()).isEqualTo(3);
            assertThat(testsTable.get(1).name).isEqualTo("test1");
            assertThat(testsTable.get(2).name).isEqualTo("test2");
            assertThat(testsTable.get(3).name).isEqualTo("test3");
            assertThat(testsTable.get(1).count).isEqualTo(33);
            assertThat(testsTable.get(2).id).isEqualTo(2);
            assertThat(testsTable.get(3).count).isEqualTo(11);
        }
    }

    /** Extracts a {@code count(*)} result through the MS_TableRecordCount helper record. */
    @Test
    public void test08GetCount() throws Exception {
        try (MS_ConnectionSession con = db.getConnectionSession()) {
            MS_PreparedSQLQuery st = con.prepareQuery("select count(*) from tests");
            ResultSet rs = con.getQueryResult(st);
            assertThat(MS_ResultSetExtractingUtils.extractRecord(rs, MS_TableRecordCount.class)
                    .getCount()).isEqualTo(3);
        }
    }

    /** Initializing with null connection parameters is expected to fail fast with an NPE. */
    @Test(expected = NullPointerException.class)
    public void test11FailToInitializeWithNullParams() {
        MS_MySQLDatabase database = new MS_MySQLDatabase(new MS_DBParameters()
                .withHostname(TestData.TESTING_SERVER_HOSTAME)
                .withDbName(null)
                .withUserName(null)
                .withPassword(null)
                .withPort(0)
        );
        database.initialize();
    }

    /**
     * Row-mapping helper for the {@code tests} table; {@code id} is the unique key used
     * by {@link MS_ResultSetExtractingUtils#extractMap}.
     */
    private static class Table_tests_Row implements MS_TableUniqueRecord<Integer> {
        int id;
        String name;
        int count;

        public Table_tests_Row() {
        }

        @Override
        public Integer getUniqueFieldValue() {
            return id;
        }

        @Override
        public void initColumns(ResultSet rs) throws SQLException {
            id = rs.getInt("id");
            name = rs.getString("name");
            count = rs.getInt("count");
        }
    }
}
/*******************************************************************************
 * ALMA - Atacama Large Millimeter Array
 * Copyright (c) ESO - European Southern Observatory, 2011
 * (in the framework of the ALMA collaboration).
 * All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 *******************************************************************************/
package alma.acs.profiling.orb;

import java.util.HashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;

import org.jacorb.orb.acs.AcsORBProfiler;

import alma.acs.logging.AcsLogger;
import alma.acs.logging.RepeatGuard;
import alma.acs.util.IsoDateFormat;

/**
 * Profiler implementation that can be used as a base class (or directly) for ORB profiling
 * of containers, manager, OMC etc.
 * <p>
 * If registered with the ORB, this class will collect ORB callbacks and log statistics
 * at most once per repeat-guard interval (30 seconds, see the c'tor and
 * {@link #checkAndLogStatus()}).
 * (FIX: the previous Javadoc claimed "every 10 seconds", which contradicted the
 * {@code RepeatGuard(30, TimeUnit.SECONDS, -1)} actually used.)
 * <p>
 * To get logs for every ORB callback (only available for stdout), use {@link #DEBUG_CONFIG_PROPERTYNAME}.
 *
 * @author msekoran (first impl as cdb_rdb :: ORBRequestTimer), hsommer
 */
public class AcsORBProfilerImplBase implements AcsORBProfiler {

	/**
	 * This property can optionally be set to one or more (comma and/or space separated) names of
	 * callback methods as defined in {@link AcsORBProfiler}, whose invocations will then be printed to stdout
	 * in addition to the normal profiling summary statistics that gets logged.
	 * <p>
	 * Example: <code>-Dalma.acs.profiling.orb.debugLogs='requestFinished,undeliveredRequest requestQueueSizeChanged'"</code>
	 */
	public static final String DEBUG_CONFIG_PROPERTYNAME = "alma.acs.profiling.orb.debugLogs";

	/**
	 * Logger passed in the c'tor.
	 */
	protected final AcsLogger logger;

	/**
	 * Repeat guard wrapped around {@link #logger}, to control the number of ORB status messages logged.
	 * @see #checkAndLogStatus()
	 */
	private final RepeatGuard orbStatusLogRepeatGuard;

	/**
	 * Percentage of busy threads in the connection thread pool.
	 */
	protected volatile int connectionPoolUsePercent;

	/**
	 * Number of undelivered requests since the last ORB profiler status log.
	 * @see #undeliveredRequest(int, String, String, boolean)
	 */
	private final AtomicInteger undeliveredRequests = new AtomicInteger(0);

	/**
	 * Maximum request queue usage in percent, for the queue owned by the POA given in {@link #requestQueueMaxUsePOA}.
	 * <p>
	 * Note about usage of AtomicInteger: What we really would like to use is an AtomicReference class
	 * whose value is a custom class that bundles together an "int requestQueueMaxUsePercent"
	 * and "String requestQueueMaxUsePOA" while offering a method "compareGreaterEqualsAndSet"
	 * that can swap a new "usepercent / poa" pair if the new integer is greater or equal than the old one.
	 * In this way we could get rid of the synchronized blocks in methods requestQueueSizeChanged and logStatus.
	 * Since such an "enhanced AtomicReference" is not available and I don't dare to write one, we do use
	 * synchronized blocks, and "abuse" this AtomicInteger as a monitor which otherwise would have to be a separate field.
	 */
	private final AtomicInteger requestQueueMaxUsePercent = new AtomicInteger(0);

	/**
	 * Name of the POA who got the longest request queue (see {@link #requestQueueMaxUsePercent})
	 * since the last ORB profiler status log.
	 */
	private volatile String requestQueueMaxUsePOA = "---";

	/**
	 * Used to trace a request from {@link #requestStarted(int, String, String)}
	 * to {@link #requestFinished(int, String, String)}.
	 * Guarded by synchronizing on the map itself.
	 */
	private final HashMap<ThreadRequestId, Long> requestTimeMap = new HashMap<ThreadRequestId, Long>();

	/**
	 * Can be set via {@link #DEBUG_CONFIG_PROPERTYNAME}.
	 * We convert to explicit booleans instead of string-based lookup to improve performance.
	 */
	protected boolean debugConnectionThreadPoolSizeChanged = false;
	protected boolean debugUndeliveredRequest = false;
	protected boolean debugRequestQueueSizeChanged = false;
	protected boolean debugThreadPoolSizeChanged = false;
	protected boolean debugRequestStarted = false;
	protected boolean debugRequestFinished = false;

	/**
	 * Creates the profiler base and parses the optional {@link #DEBUG_CONFIG_PROPERTYNAME}
	 * system property into the per-callback debug flags.
	 *
	 * @param logger the logger used for the periodic ORB status messages.
	 */
	public AcsORBProfilerImplBase(AcsLogger logger) {
		this.logger = logger;
		// at most one status log per 30 s; "-1" disables the max-repetition limit
		orbStatusLogRepeatGuard = new RepeatGuard(30, TimeUnit.SECONDS, -1);

		String debugConfig = System.getProperty(DEBUG_CONFIG_PROPERTYNAME);
		if (debugConfig != null) {
			for (String methodName : debugConfig.split("[ ,]+")) {
				if (methodName.equals("connectionThreadPoolSizeChanged")) {
					debugConnectionThreadPoolSizeChanged = true;
				}
				else if (methodName.equals("undeliveredRequest")) {
					debugUndeliveredRequest = true;
				}
				else if (methodName.equals("requestQueueSizeChanged")) {
					debugRequestQueueSizeChanged = true;
				}
				else if (methodName.equals("threadPoolSizeChanged")) {
					debugThreadPoolSizeChanged = true;
				}
				else if (methodName.equals("requestStarted")) {
					debugRequestStarted = true;
				}
				else if (methodName.equals("requestFinished")) {
					debugRequestFinished = true;
				}
			}
		}
	}

	@Override
	public void connectionThreadPoolSizeChanged(int idleThreads, int totalThreads, int maxThreads) {
		if (debugConnectionThreadPoolSizeChanged) {
			System.out.println(IsoDateFormat.formatCurrentDate() + " connectionThreadPoolSizeChanged: idleThreads=" + idleThreads + ", totalThreads=" + totalThreads + ", maxThreads=" + maxThreads);
		}
		// FIX: guard against maxThreads == 0; the unguarded division produced
		// Integer.MAX_VALUE (from a double Infinity cast) instead of a sane percentage.
		connectionPoolUsePercent = (maxThreads > 0
				? (int) (((totalThreads - idleThreads) / (double) maxThreads) * 100)
				: 0);
		checkAndLogStatus();
	}

	@Override
	public void undeliveredRequest(int messageSize, String poaName, String operation, boolean causedByQueueFull) {
		if (debugUndeliveredRequest) {
			System.out.println(IsoDateFormat.formatCurrentDate() + " undeliveredRequest: messageSize=" + messageSize + ", poaName=" + poaName + ", operation=" + operation);
		}
		undeliveredRequests.incrementAndGet();
		checkAndLogStatus();
	}

	/**
	 * Only records the maximum queue length and the POA name owning that queue.
	 * For more information, we could record the queue lengths for all POAs, but then have to watch out that
	 * the backing map etc structure does not overflow when different POAs come and go.
	 * @see org.jacorb.orb.acs.AcsORBProfiler#requestQueueSizeChanged(int, java.lang.String, int, int)
	 */
	@Override
	public void requestQueueSizeChanged(int requestId, String poaName, int queueSize, int maxQueueLength) {
		if (debugRequestQueueSizeChanged) {
			System.out.println(IsoDateFormat.formatCurrentDate() + " requestQueueSizeChanged: requestId=" + requestId + ", poaName=" + poaName + ", queueSize=" + queueSize + ", maxQueueLength=" + maxQueueLength);
		}
		// FIX: guard against maxQueueLength == 0 (same divide-by-zero issue as above).
		int requestQueueUsePercent = (maxQueueLength > 0
				? (int) ((queueSize / (double) maxQueueLength) * 100)
				: 0);
		synchronized (requestQueueMaxUsePercent) {
			// cannot compare >= and include requestQueueMaxUsePOA with just AtomicInteger methods.
			if (requestQueueUsePercent >= requestQueueMaxUsePercent.get()) {
				requestQueueMaxUsePercent.set(requestQueueUsePercent);
				requestQueueMaxUsePOA = poaName;
			}
		}
		checkAndLogStatus();
	}

	@Override
	public void threadPoolSizeChanged(String poaName, int idleThreads, int totalThreads, int maxThreads) {
		if (debugThreadPoolSizeChanged) {
			System.out.println(IsoDateFormat.formatCurrentDate() + " threadPoolSizeChanged: poaName=" + poaName + ", idleThreads=" + idleThreads + ", totalThreads=" + totalThreads + ", maxThreads=" + maxThreads);
		}
		checkAndLogStatus();
	}

	/**
	 * Uses requestId and threadId to trace a call from requestStarted to requestFinished.
	 * <p>
	 * Note that multiple clients may generate the same requestId concurrently,
	 * and that at least for JacORB a single client may generate the same requestId concurrently for different ClientConnections.
	 * <p>
	 * TODO: Wouldn't using only the thread ID be good enough?
	 */
	static final class ThreadRequestId {
		private long threadId;
		private int requestId;

		public ThreadRequestId(long threadId, int requestId) {
			this.threadId = threadId;
			this.requestId = requestId;
		}

		@Override
		public int hashCode() {
			return (int) threadId * 911 + requestId;
		}

		@Override
		public boolean equals(Object obj) {
			if (this == obj) return true;
			if (obj == null) return false;
			if (getClass() != obj.getClass()) return false;
			ThreadRequestId other = (ThreadRequestId) obj;
			if (requestId != other.requestId) return false;
			if (threadId != other.threadId) return false;
			return true;
		}
	}

	@Override
	public void requestStarted(int requestId, String poaName, String operation) {
		long threadId = Thread.currentThread().getId();
		if (debugRequestStarted) {
			System.out.println(IsoDateFormat.formatCurrentDate() + " requestStarted(" + requestId + ", " + poaName + ", " + operation + ", " + threadId + ")");
		}
		synchronized (requestTimeMap) {
			requestTimeMap.put(new ThreadRequestId(threadId, requestId), System.currentTimeMillis());
		}
	}

	@Override
	public void requestFinished(int requestId, String poaName, String operation) {
		synchronized (requestTimeMap) {
			long threadId = Thread.currentThread().getId();
			Long startTime = requestTimeMap.remove(new ThreadRequestId(threadId, requestId));
			if (startTime != null) {
				long timeSpent = System.currentTimeMillis() - startTime.longValue();
				if (debugRequestFinished) {
					System.out.println(IsoDateFormat.formatCurrentDate() + " requestFinished(" + requestId + ", " + poaName + ", " + operation + ", " + threadId + ") in " + timeSpent + " ms");
				}
			}
			else {
				// no matching requestStarted was recorded for this (thread, request) pair
				if (debugRequestFinished) {
					System.out.println(IsoDateFormat.formatCurrentDate() + " requestFinished(" + requestId + ", " + poaName + ", " + operation + ", " + threadId + ")");
				}
			}
		}
	}

	/**
	 * Logs the ORB status and resets {@link #undeliveredRequests}, {@link #requestQueueMaxUsePercent}.
	 * We use repeat guard {@link #orbStatusLogRepeatGuard} so that at most one status message
	 * gets logged per configured time interval, while nothing is logged if the ORB does not get called.
	 * <p>
	 * Subclasses that want to modify the log message should override {@link #logStatus(String, Level, int, int, int, String)}.
	 */
	private void checkAndLogStatus() {
		if (orbStatusLogRepeatGuard.checkAndIncrement()) {
			String msg = null;
			int snapshotConnectionPoolUsePercent = -1;
			int snapshotUndeliveredRequests = -1;
			int snapshotRequestQueueMaxUsePercent = -1;
			String snapshotRequestQueueMaxUsePOA = null;
			// snapshot-and-reset under the same monitor used by requestQueueSizeChanged,
			// so that the percent value and the POA name stay consistent with each other
			synchronized (requestQueueMaxUsePercent) {
				snapshotConnectionPoolUsePercent = connectionPoolUsePercent;
				snapshotUndeliveredRequests = undeliveredRequests.getAndSet(0);
				snapshotRequestQueueMaxUsePercent = requestQueueMaxUsePercent.getAndSet(0);
				snapshotRequestQueueMaxUsePOA = requestQueueMaxUsePOA;
				requestQueueMaxUsePOA = "---";
			}
			msg = "ORB status: connectionThreadsUsed=" + snapshotConnectionPoolUsePercent + "%, lost calls=" + snapshotUndeliveredRequests + ", requestQueueMaxUsePercent=" + snapshotRequestQueueMaxUsePercent + "% (in POA '" + snapshotRequestQueueMaxUsePOA + "').";
			logStatus(msg, Level.FINE, snapshotConnectionPoolUsePercent, snapshotUndeliveredRequests, snapshotRequestQueueMaxUsePercent, snapshotRequestQueueMaxUsePOA);
		}
	}

	/**
	 * This method is broken out from {@link #checkAndLogStatus()} so that subclasses can change or suppress the log message,
	 * without having to worry about log repeat guard or synchronization.
	 *
	 * @param defaultLogMessage A default log message, that can be used or replaced by another message.
	 * @param defaultLogLevel A suggested log level.
	 * @param connectionPoolUsePercent See {@link #connectionPoolUsePercent}. Can be used to build a custom message.
	 * @param undeliveredRequests See {@link #undeliveredRequests}. Can be used to build a custom message.
	 * @param requestQueueMaxUsePercent See {@link #requestQueueMaxUsePercent}. Can be used to build a custom message.
	 * @param requestQueueMaxUsePOA See {@link #requestQueueMaxUsePOA}. Can be used to build a custom message.
	 */
	protected void logStatus(String defaultLogMessage, Level defaultLogLevel,
			int connectionPoolUsePercent, int undeliveredRequests,
			int requestQueueMaxUsePercent, String requestQueueMaxUsePOA) {
		logger.log(defaultLogLevel, defaultLogMessage);
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */

package org.apache.flink.runtime.blob;

import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.java.tuple.Tuple2;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.concurrent.GuardedBy;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;

/**
 * BlobCacheSizeTracker uses {@link LinkedHashMap} to maintain the LRU order for the files in the
 * cache. When new files are intended to be put into cache, {@code checkLimit} is called to query
 * the files should be removed. This tracker maintains a lock to avoid concurrent modification. To
 * avoid the inconsistency, make sure that hold the READ/WRITE lock in {@link PermanentBlobCache}
 * first and then hold the lock here.
 */
public class BlobCacheSizeTracker {

    private static final Logger LOG = LoggerFactory.getLogger(BlobCacheSizeTracker.class);

    /** Initial capacity of the LRU map; 0.75F is the default HashMap load factor. */
    private static final int INITIAL_SIZE = 10_000;

    /** Guards {@link #total}, {@link #caches} and {@link #blobKeyByJob}. */
    private final Object lock = new Object();

    /** Upper bound (in bytes) for the sum of all tracked BLOB sizes. */
    protected final long sizeLimit;

    @GuardedBy("lock")
    private long total;

    /**
     * LRU map from (job, blob) to BLOB size; constructed with accessOrder=true so that
     * {@code get()} moves an entry to the most-recently-used position.
     */
    @GuardedBy("lock")
    private final LinkedHashMap<Tuple2<JobID, BlobKey>, Long> caches;

    /** Index of tracked BLOB keys per job, used by {@link #untrackAll(JobID)}. */
    @GuardedBy("lock")
    private final HashMap<JobID, Set<BlobKey>> blobKeyByJob;

    public BlobCacheSizeTracker(long sizeLimit) {
        checkArgument(sizeLimit > 0);

        this.sizeLimit = sizeLimit;
        this.total = 0L;
        this.caches = new LinkedHashMap<>(INITIAL_SIZE, 0.75F, true);
        this.blobKeyByJob = new HashMap<>();
    }

    /**
     * Check the size limit and return the BLOBs to delete.
     *
     * <p>BLOBs are proposed for deletion in least-recently-used order until the new BLOB
     * would fit under {@link #sizeLimit}.
     *
     * @param size size of the BLOB intended to put into the cache
     * @return list of BLOBs to delete before putting into the target BLOB
     */
    public List<Tuple2<JobID, BlobKey>> checkLimit(long size) {
        checkArgument(size >= 0);

        synchronized (lock) {
            List<Tuple2<JobID, BlobKey>> blobsToDelete = new ArrayList<>();

            long current = total;

            for (Map.Entry<Tuple2<JobID, BlobKey>, Long> entry : caches.entrySet()) {
                if (current + size > sizeLimit) {
                    blobsToDelete.add(entry.getKey());
                    current -= entry.getValue();
                } else {
                    // FIX (perf): once the new BLOB fits, it fits for the rest of the
                    // iteration too ('current' only changes in the branch above), so we
                    // can stop scanning instead of walking the whole LRU map.
                    break;
                }
            }

            return blobsToDelete;
        }
    }

    /** Register the BLOB to the tracker. */
    public void track(JobID jobId, BlobKey blobKey, long size) {
        checkNotNull(jobId);
        checkNotNull(blobKey);
        checkArgument(size >= 0);

        synchronized (lock) {
            if (caches.putIfAbsent(Tuple2.of(jobId, blobKey), size) == null) {
                blobKeyByJob.computeIfAbsent(jobId, ignore -> new HashSet<>()).add(blobKey);

                total += size;
                if (total > sizeLimit) {
                    LOG.warn(
                            "The overall size of BLOBs in the cache exceeds "
                                    + "the limit. Limit = [{}], Current: [{}], "
                                    + "The size of next BLOB: [{}].",
                            sizeLimit,
                            total,
                            size);
                }
            } else {
                LOG.warn(
                        "Attempt to track a duplicated BLOB. This may indicate a duplicate upload "
                                + "or a hash collision. Ignoring newest upload. "
                                + "JobID = [{}], BlobKey = [{}]",
                        jobId,
                        blobKey);
            }
        }
    }

    /** Remove the BLOB from the tracker. */
    public void untrack(Tuple2<JobID, BlobKey> key) {
        checkNotNull(key);
        checkNotNull(key.f0);
        checkNotNull(key.f1);

        synchronized (lock) {
            // FIX: the previous computeIfAbsent(...).remove(...) allocated and permanently
            // retained an empty HashSet for any job id that was never tracked (and re-created
            // an empty entry for a job just removed by untrackAll), slowly leaking map entries.
            Set<BlobKey> keysForJob = blobKeyByJob.get(key.f0);
            if (keysForJob != null) {
                keysForJob.remove(key.f1);
                if (keysForJob.isEmpty()) {
                    blobKeyByJob.remove(key.f0);
                }
            }

            Long size = caches.remove(key);

            if (size != null) {
                checkState(size >= 0);
                total -= size;
            }
        }
    }

    /** Remove the BLOB from the tracker. */
    private void untrack(JobID jobId, BlobKey blobKey) {
        checkNotNull(jobId);
        checkNotNull(blobKey);

        untrack(Tuple2.of(jobId, blobKey));
    }

    /**
     * Update the least used index for the BLOBs so that the tracker can easily find out the least
     * recently used BLOBs.
     */
    public void update(JobID jobId, BlobKey blobKey) {
        checkNotNull(jobId);
        checkNotNull(blobKey);

        synchronized (lock) {
            // the return value is deliberately ignored: on an access-ordered LinkedHashMap,
            // get() itself moves the entry to the most-recently-used position
            caches.get(Tuple2.of(jobId, blobKey));
        }
    }

    /** Unregister all the tracked BLOBs related to the given job. */
    public void untrackAll(JobID jobId) {
        checkNotNull(jobId);

        synchronized (lock) {
            // remove the per-job index first; untrack(jobId, key) then only has to
            // clean up the LRU map and the total size
            Set<BlobKey> keysToRemove = blobKeyByJob.remove(jobId);
            if (keysToRemove != null) {
                for (BlobKey key : keysToRemove) {
                    untrack(jobId, key);
                }
            }
        }
    }

    @VisibleForTesting
    Long getSize(JobID jobId, BlobKey blobKey) {
        checkNotNull(jobId);
        checkNotNull(blobKey);

        synchronized (lock) {
            return caches.get(Tuple2.of(jobId, blobKey));
        }
    }

    @VisibleForTesting
    Set<BlobKey> getBlobKeysByJobId(JobID jobId) {
        checkNotNull(jobId);

        synchronized (lock) {
            return blobKeyByJob.getOrDefault(jobId, Collections.emptySet());
        }
    }
}
//package com.utd.aos; import java.net.*; import java.io.*; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.Date; import java.util.StringTokenizer; import java.util.TimeZone; public class ReadWriteProtocol { public static void main(String arg[]) throws IOException, InterruptedException{ /*ReadWriteProtocol readwritetest=new ReadWriteProtocol(); String status=readwritetest.processInput("writeclient~writing at an offset~4"); System.out.println("status:"+status); /*status=readwritetest.processInput("readclient"); System.out.println("status:"+status);*/ //status=sendWriterequest } public String processInput(String input) throws IOException, InterruptedException{ String output=null; String hostname1=""; String hostname2=""; System.out.println("SERVERID:"+Server.ServerID); if(Server.ServerID==1){ hostname1=this.getservername(2); hostname2=this.getservername(3); }else if(Server.ServerID==2){ hostname1=this.getservername(1); hostname2=this.getservername(3); }else if(Server.ServerID==3){ hostname1=this.getservername(1); hostname2=this.getservername(2); } if(input.equalsIgnoreCase("clientconnect")){ output="Welcome Client"; }else if(input.startsWith("readclient")){ StringTokenizer st = new StringTokenizer(input,"~"); st.nextToken(); //String content=st.nextToken(); String filename=st.nextToken(); int offset=Integer.parseInt(st.nextToken()); System.out.println("filename:"+filename); System.out.println("readoffset:"+offset); // Read request from client output=readFile(filename,offset); //return output; }else if (input.startsWith("writeclient")){ //Write request from client String org_input=input; StringTokenizer st = new StringTokenizer(input,"~"); st.nextToken(); String filename=st.nextToken(); String content=st.nextToken(); int offset=Integer.parseInt(st.nextToken()); System.out.println("offset:"+offset); System.out.println("content:"+content); output=writeFile(filename,content); System.out.println("OUTPUT:"+output); String 
output1,output2,output3; if(output.equalsIgnoreCase("success")){ //send write request to the other server input=org_input.replace("writeclient~", "writeserver~"); System.out.println("input:"+input); output1=sendWriterequest(input,hostname1,4545); System.out.println("Output1:"+output1); if(output1.equalsIgnoreCase("success")){ input=org_input.replace("writeclient~", "writeserver~"); System.out.println("input3:"+input); output2=sendWriterequest(input,hostname2,4545); System.out.println("output2:"+output2); if(output2.equalsIgnoreCase("success")){ output="success"; }else output="Server Write Error: All server couldn't be updated"; }else output="Server Write Error: All server couldn't be updated"; }else{ output="Server Write Error: All server couldn't be updated"; } //output3=sendWriterequest(input,"net03.utdallas.edu",4545); }else if (input.startsWith("writeserver")){ //Write request from server StringTokenizer st = new StringTokenizer(input,"~"); st.nextToken(); String filename=st.nextToken(); String content=st.nextToken(); int offset=Integer.parseInt(st.nextToken()); System.out.println("offset:"+offset); System.out.println("content:"+content); output=writeFile(filename,content); } return output; } private String readFile(String filename,int offset){ String output=""; File file=new File("file/"+filename); if(!file.exists()){ output="File Not found!"; return output; }else{ if(offset==-1){ try{ FileInputStream filestream = new FileInputStream(file); DataInputStream datastream = new DataInputStream(filestream); BufferedReader br_reader = new BufferedReader(new InputStreamReader(datastream)); String strLine; //Read File Line By Line while ((strLine = br_reader.readLine()) != null) { // Print the content on the console output=output+strLine+"\n"; } //Close the input stream datastream.close(); return output; }catch (Exception e){//Catch exception if any System.err.println("Error: " + e.getMessage()); output=""; output="File Read error"; return output; } }else{ try{ 
RandomAccessFile file_offset = new RandomAccessFile(file, "r"); if(offset>file.length()) offset=(int)file.length(); file_offset.seek(offset); byte[] buffer=new byte[(int)(file.length()-offset)]; file_offset.readFully(buffer, 0, (int)(file.length()-offset)); output = new String(buffer); return output; }catch(Exception e){ System.err.println("Error: " + e.getMessage()); output=""; output="File Read error"; return output; } } } } /*private String writeFile(String filename,String content, int offset){ //String out=""; try { File file = new File("file/"+filename); if(offset==-11){ file.createNewFile(); }else{ if(!file.exists()){ return "file not found"; }} RandomAccessFile file_offset = new RandomAccessFile(file, "rw"); if(offset>file.length()||offset==-1||offset==-11) offset=(int)file.length(); file_offset.seek(offset); //System.out.println("filesize"+file.length()); byte[] buffer=new byte[(int)(file.length()-offset)]; file_offset.readFully(buffer, 0, (int)(file.length()-offset)); //System.out.println("buffer:"+buffer); // move the pointer to the offset file_offset.seek(offset); file_offset.writeBytes(content); file_offset.write(buffer); //System.out.println("filesize"+file.length()); file_offset.close(); System.out.println("lengthafterwriting:"+file.length()); return "success"; } catch (IOException e) { System.out.println("IOException:"+e.getLocalizedMessage()); //e.printStackTrace(); return "Server write error"; } }*/ private String writeFile(String f, String s) throws IOException { FileWriter aWriter = new FileWriter(f, true); aWriter.write(s + "\n"); aWriter.flush(); aWriter.close(); System.out.println("Writing to this server successful"); return "success"; } private String getservername(int serverid){ String hostnm=""; if(serverid==1){ hostnm="net10.utdallas.edu"; }else if(serverid==2){ hostnm="net11.utdallas.edu"; }else if(serverid==3){ hostnm="net12.utdallas.edu"; } System.out.println("GETHOSTNAME:"+hostnm); return hostnm; } protected String 
sendWriterequest(String input, String hostip, int port) throws IOException, InterruptedException { // Write a client program which connects to other servers and send the write request Socket clientsocket = null; PrintWriter out = null; BufferedReader in = null; try { //int port=4545; clientsocket = new Socket(hostip, port); out = new PrintWriter(clientsocket.getOutputStream(), true); in = new BufferedReader(new InputStreamReader(clientsocket.getInputStream())); } catch (UnknownHostException e) { System.err.println("Error connecting Host:"+e.getMessage()); return "Error connecting Host"; //System.exit(1); } catch (IOException e) { System.err.println("I/O exception while connecting to the host"+e.getMessage()); return "I/O connection Error"; // System.exit(1); } System.out.println(" Connected to machine ip....."+ hostip); //BufferedReader stdIn = new BufferedReader(new InputStreamReader(System.in)); String fromServer; String fromUser; //fromServer = in.readLine(); /*if(!fromServer.isEmpty()) System.out.println("Server: " + fromServer);*/ fromUser = input; if (fromUser != null) { System.out.println("Client: " + fromUser); out.println(fromUser); } while ((fromServer = in.readLine()) != null) { //System.out.println("Server: " + fromServer); /*if (fromServer.equals("Bye.")) break;*/ //fromUser = stdIn.readLine(); if(fromServer.isEmpty()) continue; else break; //Thread.currentThread().sleep(100); } out.close(); in.close(); clientsocket.close(); return fromServer; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.jstorm.common.metric;

import com.alibaba.jstorm.client.ConfigExtension;
import com.alibaba.jstorm.common.metric.snapshot.AsmSnapshot;
import com.alibaba.jstorm.metric.AsmWindow;
import com.alibaba.jstorm.metric.MetaType;
import com.alibaba.jstorm.metric.MetricType;
import com.alibaba.jstorm.utils.TimeUtils;
import com.codahale.metrics.Metric;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Base class for windowed metrics (counter/meter/histogram/timer wrappers).
 *
 * <p>Each metric keeps per-window rolling state: a snapshot per window plus
 * bookkeeping of when each window last rolled. {@link #flush()} is expected to be
 * driven periodically by an external reporter thread.
 *
 * @param <T> the underlying codahale {@link Metric} implementation type
 */
public abstract class AsmMetric<T extends Metric> {
    protected final Logger logger = LoggerFactory.getLogger(getClass());

    private static final Joiner JOINER = Joiner.on(".");

    // All time-window lengths (seconds) a metric reports on: 1m, 10m, 2h, 1d.
    // NOTE(review): mutable static state -- see setWindowSeconds() below.
    protected static final List<Integer> windowSeconds = Lists
            .newArrayList(AsmWindow.M1_WINDOW, AsmWindow.M10_WINDOW, AsmWindow.H2_WINDOW, AsmWindow.D1_WINDOW);
    // Netty metrics only report on the 1-minute window (see getValidWindows).
    protected static final List<Integer> nettyWindows = Lists.newArrayList(AsmWindow.M1_WINDOW);
    // Smallest configured window; used as the minimum flush interval.
    protected static int minWindow = AsmWindow.M1_WINDOW;
    protected static final List<Integer> EMPTY_WIN = Lists.newArrayListWithCapacity(0);

    /**
     * Sample rate for meter, histogram and timer; counter & gauge are not sampled.
     */
    private static double sampleRate = ConfigExtension.DEFAULT_METRIC_SAMPLE_RATE;

    protected int op = MetricOp.REPORT;
    protected volatile long metricId = 0L;
    // NOTE(review): metricName is dereferenced in getValidWindows() without a null
    // check -- callers must setMetricName() before the first flush.
    protected String metricName;
    protected boolean aggregate = true;
    // Backdated by one minute so the first 1m window is considered due immediately.
    protected volatile long lastFlushTime = TimeUtils.current_time_secs() - AsmWindow.M1_WINDOW;
    // Per-window time of the last roll (seconds).
    protected Map<Integer, Long> rollingTimeMap = new ConcurrentHashMap<>();
    // Per-window flag: has this window ever rolled? (see flush()).
    protected Map<Integer, Boolean> rollingDirtyMap = new ConcurrentHashMap<>();
    protected final Map<Integer, AsmSnapshot> snapshots = new ConcurrentHashMap<Integer, AsmSnapshot>();
    // Metrics that receive the same updates as this one (e.g. aggregated views).
    protected Set<AsmMetric> assocMetrics = new HashSet<AsmMetric>();

    public AsmMetric() {
        // NOTE(review): maps are seeded from windowSeconds at construction time; if
        // setWindowSeconds() later adds windows, existing instances would hit a null
        // unboxing in flush()/rollWindows() for the new windows -- TODO confirm
        // setWindowSeconds is test-only, as its comment claims.
        for (Integer win : windowSeconds) {
            rollingTimeMap.put(win, lastFlushTime);
            rollingDirtyMap.put(win, false);
        }
    }

    /**
     * keep a random for each instance to avoid competition (although it's thread-safe).
     */
    private final Random rand = new Random();

    /** Returns true if this update should be recorded under the current sample rate. */
    protected boolean sample() {
        return rand.nextDouble() <= sampleRate;
    }

    public static void setSampleRate(double sampleRate) {
        AsmMetric.sampleRate = sampleRate;
    }

    /**
     * Record a value; implementations may drop updates based on sample().
     * (Kept abstract in order to improve performance.)
     */
    public abstract void update(Number obj);

    /** Record a value bypassing sampling by default; subclasses may override. */
    public void updateDirectly(Number obj) {
        update(obj);
    }

    public abstract AsmMetric clone();

    /** Sets the metric op bitmask (LOG/REPORT); returns this for chaining. */
    public AsmMetric setOp(int op) {
        this.op = op;
        return this;
    }

    public int getOp() {
        return this.op;
    }

    /**
     * for test
     */
    public static void setWindowSeconds(List<Integer> windows) {
        // NOTE(review): synchronized against concurrent setters only; readers of
        // windowSeconds/minWindow elsewhere do not take this lock.
        synchronized (windowSeconds) {
            windowSeconds.clear();
            windowSeconds.addAll(windows);

            minWindow = getMinWindow(windows);
        }
    }

    /** Returns the smallest window length in the list (Integer.MAX_VALUE if empty). */
    public static int getMinWindow(List<Integer> windows) {
        int min = Integer.MAX_VALUE;
        for (int win : windows) {
            if (win < min) {
                min = win;
            }
        }
        return min;
    }

    /** Associates extra metrics that should mirror this metric's updates. */
    public void addAssocMetrics(AsmMetric... metrics) {
        Collections.addAll(assocMetrics, metrics);
    }

    public long getMetricId() {
        return metricId;
    }

    public void setMetricId(long metricId) {
        this.metricId = metricId;
    }

    public String getMetricName() {
        return metricName;
    }

    public void setMetricName(String metricName) {
        this.metricName = metricName;
    }

    /**
     * Flushes pending data into the per-window snapshots and rolls any windows
     * that are due: a rolled window gets a fresh metric instance; an un-rolled
     * window that has never rolled still has its snapshot refreshed.
     */
    public void flush() {
        long time = TimeUtils.current_time_secs();
        List<Integer> windows = getValidWindows();
        if (windows.size() == 0) {
            return;
        }

        doFlush();

        List<Integer> rollwindows = rollWindows(time, windows);
        for (int win : windows) {
            if (rollwindows.contains(win)) {
                updateSnapshot(win);
                // Replace the rolled window's backing metric with a fresh instance.
                Map<Integer, T> metricMap = getWindowMetricMap();
                if (metricMap != null) {
                    metricMap.put(win, mkInstance());
                }
            } else if (!rollingDirtyMap.get(win)) {
                //if this window has never been passed, we still update this window snapshot
                updateSnapshot(win);
            }
        }
        this.lastFlushTime = TimeUtils.current_time_secs();
    }

    /**
     * Returns the subset of {@code windows} that are due to roll at {@code time},
     * marking them dirty and resetting their roll timestamps as a side effect.
     */
    public List<Integer> rollWindows(long time, List<Integer> windows) {
        List<Integer> rolling = new ArrayList<>();
        for (Integer win : windows) {
            long rollingTime = rollingTimeMap.get(win);
            // might delay somehow, so add extra 5 sec bias
            if (time - rollingTime >= win - 5) {
                rolling.add(win);
                rollingDirtyMap.put(win, true);    //mark this window has been passed
                rollingTimeMap.put(win, (long) TimeUtils.current_time_secs());
            }
        }
        return rolling;
    }

    /**
     * flush temp data to all windows & assoc metrics.
     */
    protected abstract void doFlush();

    /** Returns the window -> backing-metric map, or null if not applicable. */
    public abstract Map<Integer, T> getWindowMetricMap();

    /** Creates a fresh backing metric instance for a newly rolled window. */
    public abstract T mkInstance();

    /** Recomputes the snapshot for the given window from current data. */
    protected abstract void updateSnapshot(int window);

    public Map<Integer, AsmSnapshot> getSnapshots() {
        return snapshots;
    }

    /**
     * Returns the windows eligible for flushing right now.
     *
     * <p>DO NOT judge whether to flush by 60sec because there might be nuance by
     * the alignment of time (maybe less than 1 sec?), so we subtract 5 sec from a
     * min flush window.
     */
    public List<Integer> getValidWindows() {
        long diff = TimeUtils.current_time_secs() - this.lastFlushTime + 5;
        if (diff < minWindow) {
            // logger.warn("no valid windows for metric:{}, diff:{}", this.metricName, diff);
            return EMPTY_WIN;
        }
        // for netty metrics, use only 1min window
        if (this.metricName.startsWith(MetaType.NETTY.getV())) {
            return nettyWindows;
        }
        return windowSeconds;
    }

    public boolean isAggregate() {
        return aggregate;
    }

    public void setAggregate(boolean aggregate) {
        this.aggregate = aggregate;
    }

    /** Joins name parts with '.' into a fully qualified metric name. */
    public static String mkName(Object... parts) {
        return JOINER.join(parts);
    }

    /** Bitmask-style constants describing what to do with a metric. */
    public static class MetricOp {
        public static final int LOG = 1;
        public static final int REPORT = 2;
    }

    /** Factory mapping a MetricType to a concrete AsmMetric implementation. */
    public static class Builder {
        public static AsmMetric build(MetricType metricType) {
            AsmMetric metric;
            if (metricType == MetricType.COUNTER) {
                metric = new AsmCounter();
            } else if (metricType == MetricType.METER) {
                metric = new AsmMeter();
            } else if (metricType == MetricType.HISTOGRAM) {
                metric = new AsmHistogram();
            } else if (metricType == MetricType.TIMER) {
                metric = new AsmTimer();
            } else {
                throw new IllegalArgumentException("invalid metric type:" + metricType);
            }
            return metric;
        }
    }

    // NOTE(review): ad-hoc manual check of the sampling ratio; consider moving to a
    // unit test.
    public static void main(String[] args) throws Exception {
        AsmMeter meter = new AsmMeter();
        int t = 0, f = 0;
        for (int i = 0; i < 100; i++) {
            if (meter.sample()) {
                t++;
            } else {
                f++;
            }
        }
        System.out.println(t + "," + f);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.Map; import java.util.NavigableMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HBaseTestCase.FlushCache; import org.apache.hadoop.hbase.HBaseTestCase.HTableIncommon; import org.apache.hadoop.hbase.HBaseTestCase.Incommon; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.util.Bytes; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; /** * Port of old TestScanMultipleVersions, TestTimestamp and 
TestGetRowVersions * from old testing framework to {@link HBaseTestingUtility}. */ @Category(MediumTests.class) public class TestMultiVersions { private static final Log LOG = LogFactory.getLog(TestMultiVersions.class); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private HBaseAdmin admin; private static final int NUM_SLAVES = 3; @BeforeClass public static void setUpBeforeClass() throws Exception { UTIL.startMiniCluster(NUM_SLAVES); } @AfterClass public static void tearDownAfterClass() throws Exception { UTIL.shutdownMiniCluster(); } @Before public void before() throws MasterNotRunningException, ZooKeeperConnectionException, IOException { this.admin = new HBaseAdmin(UTIL.getConfiguration()); } @After public void after() throws IOException { this.admin.close(); } /** * Tests user specifiable time stamps putting, getting and scanning. Also * tests same in presence of deletes. Test cores are written so can be * run against an HRegion and against an HTable: i.e. both local and remote. * * <p>Port of old TestTimestamp test to here so can better utilize the spun * up cluster running more than a single test per spin up. Keep old tests' * crazyness. */ @Test public void testTimestamps() throws Exception { HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("testTimestamps")); HColumnDescriptor hcd = new HColumnDescriptor(TimestampTestBase.FAMILY_NAME); hcd.setMaxVersions(3); desc.addFamily(hcd); this.admin.createTable(desc); HTable table = new HTable(UTIL.getConfiguration(), desc.getTableName()); // TODO: Remove these deprecated classes or pull them in here if this is // only test using them. Incommon incommon = new HTableIncommon(table); TimestampTestBase.doTestDelete(incommon, new FlushCache() { public void flushcache() throws IOException { UTIL.getHBaseCluster().flushcache(); } }); // Perhaps drop and readd the table between tests so the former does // not pollute this latter? Or put into separate tests. 
TimestampTestBase.doTestTimestampScanning(incommon, new FlushCache() { public void flushcache() throws IOException { UTIL.getMiniHBaseCluster().flushcache(); } }); table.close(); } /** * Verifies versions across a cluster restart. * Port of old TestGetRowVersions test to here so can better utilize the spun * up cluster running more than a single test per spin up. Keep old tests' * crazyness. */ @Test public void testGetRowVersions() throws Exception { final String tableName = "testGetRowVersions"; final byte [] contents = Bytes.toBytes("contents"); final byte [] row = Bytes.toBytes("row"); final byte [] value1 = Bytes.toBytes("value1"); final byte [] value2 = Bytes.toBytes("value2"); final long timestamp1 = 100L; final long timestamp2 = 200L; final HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName)); HColumnDescriptor hcd = new HColumnDescriptor(contents); hcd.setMaxVersions(3); desc.addFamily(hcd); this.admin.createTable(desc); Put put = new Put(row, timestamp1); put.add(contents, contents, value1); HTable table = new HTable(UTIL.getConfiguration(), tableName); table.put(put); // Shut down and restart the HBase cluster table.close(); UTIL.shutdownMiniHBaseCluster(); LOG.debug("HBase cluster shut down -- restarting"); UTIL.startMiniHBaseCluster(1, NUM_SLAVES); // Make a new connection. Use new Configuration instance because old one // is tied to an HConnection that has since gone stale. 
table = new HTable(new Configuration(UTIL.getConfiguration()), tableName); // Overwrite previous value put = new Put(row, timestamp2); put.add(contents, contents, value2); table.put(put); // Now verify that getRow(row, column, latest) works Get get = new Get(row); // Should get one version by default Result r = table.get(get); assertNotNull(r); assertFalse(r.isEmpty()); assertTrue(r.size() == 1); byte [] value = r.getValue(contents, contents); assertTrue(value.length != 0); assertTrue(Bytes.equals(value, value2)); // Now check getRow with multiple versions get = new Get(row); get.setMaxVersions(); r = table.get(get); assertTrue(r.size() == 2); value = r.getValue(contents, contents); assertTrue(value.length != 0); assertTrue(Bytes.equals(value, value2)); NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map = r.getMap(); NavigableMap<byte[], NavigableMap<Long, byte[]>> familyMap = map.get(contents); NavigableMap<Long, byte[]> versionMap = familyMap.get(contents); assertTrue(versionMap.size() == 2); assertTrue(Bytes.equals(value1, versionMap.get(timestamp1))); assertTrue(Bytes.equals(value2, versionMap.get(timestamp2))); table.close(); } /** * Port of old TestScanMultipleVersions test here so can better utilize the * spun up cluster running more than just a single test. Keep old tests * crazyness. * * <p>Tests five cases of scans and timestamps. 
* @throws Exception */ @Test public void testScanMultipleVersions() throws Exception { final byte [] tableName = Bytes.toBytes("testScanMultipleVersions"); final HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName)); desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY)); final byte [][] rows = new byte[][] { Bytes.toBytes("row_0200"), Bytes.toBytes("row_0800") }; final byte [][] splitRows = new byte[][] {Bytes.toBytes("row_0500")}; final long [] timestamp = new long[] {100L, 1000L}; this.admin.createTable(desc, splitRows); HTable table = new HTable(UTIL.getConfiguration(), tableName); // Assert we got the region layout wanted. NavigableMap<HRegionInfo, ServerName> locations = table.getRegionLocations(); assertEquals(2, locations.size()); int index = 0; for (Map.Entry<HRegionInfo, ServerName> e: locations.entrySet()) { HRegionInfo hri = e.getKey(); if (index == 0) { assertTrue(Bytes.equals(HConstants.EMPTY_START_ROW, hri.getStartKey())); assertTrue(Bytes.equals(hri.getEndKey(), splitRows[0])); } else if (index == 1) { assertTrue(Bytes.equals(splitRows[0], hri.getStartKey())); assertTrue(Bytes.equals(hri.getEndKey(), HConstants.EMPTY_END_ROW)); } index++; } // Insert data for (int i = 0; i < locations.size(); i++) { for (int j = 0; j < timestamp.length; j++) { Put put = new Put(rows[i], timestamp[j]); put.add(HConstants.CATALOG_FAMILY, null, timestamp[j], Bytes.toBytes(timestamp[j])); table.put(put); } } // There are 5 cases we have to test. Each is described below. for (int i = 0; i < rows.length; i++) { for (int j = 0; j < timestamp.length; j++) { Get get = new Get(rows[i]); get.addFamily(HConstants.CATALOG_FAMILY); get.setTimeStamp(timestamp[j]); Result result = table.get(get); int cellCount = 0; for(@SuppressWarnings("unused")Cell kv : result.listCells()) { cellCount++; } assertTrue(cellCount == 1); } table.close(); } // Case 1: scan with LATEST_TIMESTAMP. 
Should get two rows int count = 0; Scan scan = new Scan(); scan.addFamily(HConstants.CATALOG_FAMILY); ResultScanner s = table.getScanner(scan); try { for (Result rr = null; (rr = s.next()) != null;) { System.out.println(rr.toString()); count += 1; } assertEquals("Number of rows should be 2", 2, count); } finally { s.close(); } // Case 2: Scan with a timestamp greater than most recent timestamp // (in this case > 1000 and < LATEST_TIMESTAMP. Should get 2 rows. count = 0; scan = new Scan(); scan.setTimeRange(1000L, Long.MAX_VALUE); scan.addFamily(HConstants.CATALOG_FAMILY); s = table.getScanner(scan); try { while (s.next() != null) { count += 1; } assertEquals("Number of rows should be 2", 2, count); } finally { s.close(); } // Case 3: scan with timestamp equal to most recent timestamp // (in this case == 1000. Should get 2 rows. count = 0; scan = new Scan(); scan.setTimeStamp(1000L); scan.addFamily(HConstants.CATALOG_FAMILY); s = table.getScanner(scan); try { while (s.next() != null) { count += 1; } assertEquals("Number of rows should be 2", 2, count); } finally { s.close(); } // Case 4: scan with timestamp greater than first timestamp but less than // second timestamp (100 < timestamp < 1000). Should get 2 rows. count = 0; scan = new Scan(); scan.setTimeRange(100L, 1000L); scan.addFamily(HConstants.CATALOG_FAMILY); s = table.getScanner(scan); try { while (s.next() != null) { count += 1; } assertEquals("Number of rows should be 2", 2, count); } finally { s.close(); } // Case 5: scan with timestamp equal to first timestamp (100) // Should get 2 rows. count = 0; scan = new Scan(); scan.setTimeStamp(100L); scan.addFamily(HConstants.CATALOG_FAMILY); s = table.getScanner(scan); try { while (s.next() != null) { count += 1; } assertEquals("Number of rows should be 2", 2, count); } finally { s.close(); } } }
// Copyright 2011-2016 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.security.zynamics.binnavi.Gui.CriteriaDialog; import com.google.common.base.Preconditions; import com.google.security.zynamics.binnavi.API.plugins.ICriteriaSelectionPlugin; import com.google.security.zynamics.binnavi.API.plugins.ICriteriaSelectionPlugin.IFixedCriterium; import com.google.security.zynamics.binnavi.API.plugins.PluginInterface; import com.google.security.zynamics.binnavi.CUtilityFunctions; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.CConditionCriterium; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.ICachedCriterium; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.ICriterium; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.ICriteriumCreator; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.InDegrees.CIndegreeCriteriumCreator; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.NodeColor.CColorCriteriumCreator; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.OutDegree.COutdegreeCriteriumCreator; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.Selection.CSelectionCriteriumCreator; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.Tag.CTagCriteriumCreator; import 
com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.Text.CTextCriteriumCreator; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.Conditions.Visibillity.CVisibilityCriteriumCreator; import com.google.security.zynamics.binnavi.Gui.CriteriaDialog.ExpressionModel.CCachedExpressionTreeNode; import com.google.security.zynamics.binnavi.Tagging.ITagManager; import com.google.security.zynamics.binnavi.api2.plugins.IPlugin; import com.google.security.zynamics.binnavi.yfileswrap.API.disassembly.View2D; import com.google.security.zynamics.binnavi.yfileswrap.zygraph.NaviNode; import com.google.security.zynamics.binnavi.yfileswrap.zygraph.ZyGraph; import java.util.ArrayList; import java.util.List; import javax.swing.Icon; import javax.swing.JPanel; /** * Provides all available individual criteria for the criteria dialog. */ public final class CCriteriaFactory { /** * Graph whose nodes are selected. */ private final ZyGraph m_graph; /** * API graph whose nodes are selected. */ private final View2D m_view2D; /** * Provides the available node tags. */ private final ITagManager m_tagManager; /** * Creates a new factory object. * * @param graph Graph whose nodes are selected. * @param view2D API graph whose nodes are selected. * @param tagManager Provides the available node tags. */ public CCriteriaFactory(final ZyGraph graph, final View2D view2D, final ITagManager tagManager) { m_graph = Preconditions.checkNotNull(graph, "IE01316: Graph argument can not be null"); // m_view2D = Preconditions.checkNotNull(view2D, "IE01794: View 2D argument can not be null"); m_view2D = view2D; // m_tagManager = Preconditions.checkNotNull(tagManager, // "IE02088: tagManager argument can not be null"); m_tagManager = tagManager; } /** * Returns all available criterium creators. * * @return All available criterium creators. 
*/ public List<ICriteriumCreator> getConditions() { final List<ICriteriumCreator> conditions = new ArrayList<ICriteriumCreator>(); conditions.add(new CTextCriteriumCreator()); conditions.add(new CTagCriteriumCreator(m_tagManager)); conditions.add(new CColorCriteriumCreator(m_graph)); conditions.add(new CIndegreeCriteriumCreator()); conditions.add(new COutdegreeCriteriumCreator()); conditions.add(new CVisibilityCriteriumCreator()); conditions.add(new CSelectionCriteriumCreator()); for ( @SuppressWarnings("rawtypes") final IPlugin plugin : PluginInterface.instance().getPluginRegistry()) { if (plugin instanceof ICriteriaSelectionPlugin) { final ICriteriaSelectionPlugin cplugin = (ICriteriaSelectionPlugin) plugin; conditions.add(new CPluginCriteriumCreator(m_view2D, cplugin)); } } return conditions; } /** * Wraps plugin criteria. */ private static class CPluginCachedCriterium implements ICachedCriterium { /** * API graph whose nodes are selected. */ private final View2D m_view2D; /** * The wrapped plugin object. */ private final com.google.security.zynamics.binnavi.API.plugins.ICriteriaSelectionPlugin.ICriterium m_plugin; /** * Object that contains a fixed point version of the values from the input panel. */ private final IFixedCriterium m_value; /** * Creates a new cached criterium. * * @param view2D API graph whose nodes are selected. * @param plugin The wrapped plugin object. 
*/ public CPluginCachedCriterium( final View2D view2D, final com.google.security.zynamics.binnavi.API.plugins.ICriteriaSelectionPlugin.ICriterium plugin) { m_view2D = view2D; m_plugin = plugin; m_value = plugin.getFixedCriterium(); } @Override public String getFormulaString(final List<CCachedExpressionTreeNode> children) { try { return m_plugin.getFormulaString(); } catch (final Exception exception) { CUtilityFunctions.logException(exception); return "???"; } } @Override public boolean matches(final NaviNode node) { try { return m_value.matches(m_view2D.getNode(node.getRawNode())); } catch (final Exception exception) { CUtilityFunctions.logException(exception); return false; } } } /** * Wraps a plugin criterium. */ private static class CPluginCriterium extends CConditionCriterium { /** * API graph whose nodes are selected. */ private final View2D m_view2D; /** * The wrapped plugin object. */ private final com.google.security.zynamics.binnavi.API.plugins.ICriteriaSelectionPlugin.ICriterium m_plugin; /** * Creates a new criterium object. * * @param view2D API graph whose nodes are selected. * @param plugin The wrapped plugin object. 
*/ public CPluginCriterium( final View2D view2D, final com.google.security.zynamics.binnavi.API.plugins.ICriteriaSelectionPlugin.ICriterium plugin) { m_view2D = view2D; m_plugin = plugin; } @Override public ICachedCriterium createCachedCriterium() { return new CPluginCachedCriterium(m_view2D, m_plugin); } @Override public String getCriteriumDescription() { try { return m_plugin.getCriteriumDescription(); } catch (final Exception exception) { CUtilityFunctions.logException(exception); return "???"; } } @Override public JPanel getCriteriumPanel() { try { return m_plugin.getCriteriumPanel(); } catch (final Exception exception) { return new JPanel(); } } @Override public Icon getIcon() { return null; } @Override public boolean matches(final NaviNode node) { return m_plugin.matches(m_view2D.getNode(node.getRawNode())); } } /** * Wraps plugin criteria creators. */ private static class CPluginCriteriumCreator implements ICriteriumCreator { /** * API graph whose nodes are selected. */ private final View2D m_view2D; /** * The wrapped plugin. */ private final ICriteriaSelectionPlugin m_plugin; /** * Creates a new creator object. * * @param view2D API graph whose nodes are selected. * @param plugin The wrapped plugin. */ public CPluginCriteriumCreator(final View2D view2D, final ICriteriaSelectionPlugin plugin) { m_view2D = view2D; m_plugin = plugin; } @Override public ICriterium createCriterium() { try { return new CPluginCriterium(m_view2D, m_plugin.getCriterium(m_view2D)); } catch (final Exception exception) { CUtilityFunctions.logException(exception); return null; } } @Override public String getCriteriumDescription() { return m_plugin.getCriteriumDescription(); } } }
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.cpp; import static com.google.common.truth.Truth.assertThat; import static org.junit.Assert.fail; import com.google.common.base.Joiner; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import com.google.devtools.build.lib.analysis.config.InvalidConfigurationException; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.ActionConfig; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.ExpansionException; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.FeatureConfiguration; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.Variables; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.Variables.IntegerValue; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.Variables.LibraryToLinkValue; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.Variables.SequenceBuilder; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.Variables.StringSequenceBuilder; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.Variables.StructureBuilder; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.Variables.VariableValue; import 
com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.Variables.VariableValueBuilder; import com.google.devtools.build.lib.testutil.Suite; import com.google.devtools.build.lib.testutil.TestSpec; import com.google.devtools.build.lib.testutil.TestUtils; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.view.config.crosstool.CrosstoolConfig.CToolchain; import com.google.protobuf.TextFormat; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Tests for toolchain features. */ @RunWith(JUnit4.class) @TestSpec(size = Suite.MEDIUM_TESTS) public class CcToolchainFeaturesTest { /** * Creates a {@code Variables} configuration from a list of key/value pairs. * * <p>If there are multiple entries with the same key, the variable will be treated as sequence * type. */ private Variables createVariables(String... entries) { if (entries.length % 2 != 0) { throw new IllegalArgumentException( "createVariables takes an even number of arguments (key/value pairs)"); } Multimap<String, String> entryMap = ArrayListMultimap.create(); for (int i = 0; i < entries.length; i += 2) { entryMap.put(entries[i], entries[i + 1]); } Variables.Builder variables = new Variables.Builder(); for (String name : entryMap.keySet()) { Collection<String> value = entryMap.get(name); if (value.size() == 1) { variables.addStringVariable(name, value.iterator().next()); } else { variables.addStringSequenceVariable(name, ImmutableList.copyOf(value)); } } return variables.build(); } /** * Creates a CcToolchainFeatures from features described in the given toolchain fragment. */ public static CcToolchainFeatures buildFeatures(String... 
toolchain) throws Exception { CToolchain.Builder toolchainBuilder = CToolchain.newBuilder(); TextFormat.merge(Joiner.on("").join(toolchain), toolchainBuilder); return new CcToolchainFeatures(toolchainBuilder.buildPartial()); } private Set<String> getEnabledFeatures(CcToolchainFeatures features, String... requestedFeatures) throws Exception { FeatureConfiguration configuration = features.getFeatureConfiguration(assumptionsFor(requestedFeatures)); ImmutableSet.Builder<String> enabledFeatures = ImmutableSet.builder(); for (String feature : features.getActivatableNames()) { if (configuration.isEnabled(feature)) { enabledFeatures.add(feature); } } return enabledFeatures.build(); } private FeatureSpecification assumptionsFor(String... requestedFeatures) { return FeatureSpecification.create( ImmutableSet.copyOf(requestedFeatures), ImmutableSet.<String>of()); } @Test public void testUnconditionalFeature() throws Exception { assertThat(buildFeatures("").getFeatureConfiguration(assumptionsFor("a")).isEnabled("a")) .isFalse(); assertThat( buildFeatures("feature { name: 'a' }") .getFeatureConfiguration(assumptionsFor("b")) .isEnabled("a")) .isFalse(); assertThat( buildFeatures("feature { name: 'a' }") .getFeatureConfiguration(assumptionsFor("a")) .isEnabled("a")) .isTrue(); } @Test public void testUnsupportedAction() throws Exception { FeatureConfiguration configuration = buildFeatures("").getFeatureConfiguration(assumptionsFor()); assertThat(configuration.getCommandLine("invalid-action", createVariables())).isEmpty(); } @Test public void testFlagOrderEqualsSpecOrder() throws Exception { FeatureConfiguration configuration = buildFeatures( "feature {", " name: 'a'", " flag_set {", " action: 'c++-compile'", " flag_group { flag: '-a-c++-compile' }", " }", " flag_set {", " action: 'link'", " flag_group { flag: '-a-c++-compile' }", " }", "}", "feature {", " name: 'b'", " flag_set {", " action: 'c++-compile'", " flag_group { flag: '-b-c++-compile' }", " }", " flag_set {", " action: 
'link'", " flag_group { flag: '-b-link' }", " }", "}") .getFeatureConfiguration(assumptionsFor("a", "b")); List<String> commandLine = configuration.getCommandLine( CppCompileAction.CPP_COMPILE, createVariables()); assertThat(commandLine).containsExactly("-a-c++-compile", "-b-c++-compile").inOrder(); } @Test public void testEnvVars() throws Exception { FeatureConfiguration configuration = buildFeatures( "feature {", " name: 'a'", " env_set {", " action: 'c++-compile'", " env_entry { key: 'foo', value: 'bar' }", " env_entry { key: 'cat', value: 'meow' }", " }", " flag_set {", " action: 'c++-compile'", " flag_group { flag: '-a-c++-compile' }", " }", "}", "feature {", " name: 'b'", " env_set {", " action: 'c++-compile'", " env_entry { key: 'dog', value: 'woof' }", " }", "}", "feature {", " name: 'c'", " env_set {", " action: 'c++-compile'", " env_entry { key: 'doNotInclude', value: 'doNotIncludePlease' }", " }", "}") .getFeatureConfiguration(assumptionsFor("a", "b")); Map<String, String> env = configuration.getEnvironmentVariables( CppCompileAction.CPP_COMPILE, createVariables()); assertThat(env).containsExactly("foo", "bar", "cat", "meow", "dog", "woof").inOrder(); assertThat(env).doesNotContainEntry("doNotInclude", "doNotIncludePlease"); } private String getExpansionOfFlag(String value) throws Exception { return getExpansionOfFlag(value, createVariables()); } private List<String> getCommandLineForFlagGroups(String groups, Variables variables) throws Exception { FeatureConfiguration configuration = buildFeatures( "feature {", " name: 'a'", " flag_set {", " action: 'c++-compile'", " " + groups, " }", "}") .getFeatureConfiguration(assumptionsFor("a")); return configuration.getCommandLine(CppCompileAction.CPP_COMPILE, variables); } private List<String> getCommandLineForFlag(String value, Variables variables) throws Exception { return getCommandLineForFlagGroups("flag_group { flag: '" + value + "' }", variables); } private String getExpansionOfFlag(String value, Variables 
variables) throws Exception { return getCommandLineForFlag(value, variables).get(0); } private String getFlagParsingError(String value) throws Exception { try { getExpansionOfFlag(value); fail("Expected InvalidConfigurationException"); return ""; } catch (InvalidConfigurationException e) { return e.getMessage(); } } private String getFlagExpansionError(String value, Variables variables) throws Exception { try { getExpansionOfFlag(value, variables); fail("Expected ExpansionException"); return ""; } catch (ExpansionException e) { return e.getMessage(); } } private String getFlagGroupsExpansionError(String flagGroups, Variables variables) throws Exception { try { getCommandLineForFlagGroups(flagGroups, variables).get(0); fail("Expected ExpansionException"); return ""; } catch (ExpansionException e) { return e.getMessage(); } } @Test public void testVariableExpansion() throws Exception { assertThat(getExpansionOfFlag("%%")).isEqualTo("%"); assertThat(getExpansionOfFlag("%% a %% b %%")).isEqualTo("% a % b %"); assertThat(getExpansionOfFlag("%%{var}")).isEqualTo("%{var}"); assertThat(getExpansionOfFlag("%{v}", createVariables("v", "<flag>"))).isEqualTo("<flag>"); assertThat(getExpansionOfFlag(" %{v1} %{v2} ", createVariables("v1", "1", "v2", "2"))) .isEqualTo(" 1 2 "); assertThat(getFlagParsingError("%")).contains("expected '{'"); assertThat(getFlagParsingError("% ")).contains("expected '{'"); assertThat(getFlagParsingError("%{")).contains("expected variable name"); assertThat(getFlagParsingError("%{}")).contains("expected variable name"); assertThat( getCommandLineForFlagGroups( "flag_group{ iterate_over: 'v' flag: '%{v}' }", new Variables.Builder() .addStringSequenceVariable("v", ImmutableList.<String>of()) .build())) .isEmpty(); assertThat(getFlagExpansionError("%{v}", createVariables())) .contains("Invalid toolchain configuration: Cannot find variable named 'v'"); } private Variables createStructureSequenceVariables(String name, StructureBuilder... 
values) { SequenceBuilder builder = new SequenceBuilder(); for (StructureBuilder value : values) { builder.addValue(value.build()); } return new Variables.Builder().addCustomBuiltVariable(name, builder).build(); } private Variables createStructureVariables(String name, StructureBuilder value) { return new Variables.Builder().addCustomBuiltVariable(name, value).build(); } @Test public void testSimpleStructureVariableExpansion() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group { flag: '-A%{struct.foo}' flag: '-B%{struct.bar}' }", createStructureVariables( "struct", new StructureBuilder() .addField("foo", "fooValue") .addField("bar", "barValue")))) .containsExactly("-AfooValue", "-BbarValue"); } @Test public void testNestedStructureVariableExpansion() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group { flag: '-A%{struct.foo.bar}' }", createStructureVariables( "struct", new StructureBuilder() .addField("foo", new StructureBuilder().addField("bar", "fooBarValue"))))) .containsExactly("-AfooBarValue"); } @Test public void testAccessingStructureAsStringFails() throws Exception { assertThat( getFlagGroupsExpansionError( "flag_group { flag: '-A%{struct}' }", createStructureVariables( "struct", new StructureBuilder() .addField("foo", "fooValue") .addField("bar", "barValue")))) .isEqualTo( "Invalid toolchain configuration: Cannot expand variable 'struct': expected string, " + "found structure"); } @Test public void testAccessingStringValueAsStructureFails() throws Exception { assertThat( getFlagGroupsExpansionError( "flag_group { flag: '-A%{stringVar.foo}' }", createVariables("stringVar", "stringVarValue"))) .isEqualTo( "Invalid toolchain configuration: Cannot expand variable 'stringVar.foo': variable " + "'stringVar' is string, expected structure"); } @Test public void testAccessingSequenceAsStructureFails() throws Exception { assertThat( getFlagGroupsExpansionError( "flag_group { flag: '-A%{sequence.foo}' }", 
createVariables("sequence", "foo1", "sequence", "foo2"))) .isEqualTo( "Invalid toolchain configuration: Cannot expand variable 'sequence.foo': variable " + "'sequence' is sequence, expected structure"); } @Test public void testAccessingMissingStructureFieldFails() throws Exception { assertThat( getFlagGroupsExpansionError( "flag_group { flag: '-A%{struct.missing}' }", createStructureVariables( "struct", new StructureBuilder().addField("bar", "barValue")))) .isEqualTo( "Invalid toolchain configuration: Cannot expand variable 'struct.missing': structure " + "struct doesn't have a field named 'missing'"); } @Test public void testSequenceOfStructuresExpansion() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group { iterate_over: 'structs' flag: '-A%{structs.foo}' }", createStructureSequenceVariables( "structs", new StructureBuilder().addField("foo", "foo1Value"), new StructureBuilder().addField("foo", "foo2Value")))) .containsExactly("-Afoo1Value", "-Afoo2Value"); } @Test public void testStructureOfSequencesExpansion() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " iterate_over: 'struct.sequences'" + " flag: '-A%{struct.sequences.foo}'" + "}", createStructureVariables( "struct", new StructureBuilder() .addField( "sequences", new SequenceBuilder() .addValue(new StructureBuilder().addField("foo", "foo1Value")) .addValue(new StructureBuilder().addField("foo", "foo2Value")))))) .containsExactly("-Afoo1Value", "-Afoo2Value"); } @Test public void testDottedNamesNotAlwaysMeanStructures() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " iterate_over: 'struct.sequence'" + " flag_group {" + " iterate_over: 'other_sequence'" + " flag_group {" + " flag: '-A%{struct.sequence} -B%{other_sequence}'" + " }" + " }" + "}", new Variables.Builder() .addCustomBuiltVariable( "struct", new StructureBuilder() .addField("sequence", ImmutableList.of("first", "second"))) .addStringSequenceVariable("other_sequence", 
ImmutableList.of("foo", "bar")) .build())) .containsExactly("-Afirst -Bfoo", "-Afirst -Bbar", "-Asecond -Bfoo", "-Asecond -Bbar"); } @Test public void testExpandIfAllAvailableWithStructsExpandsIfPresent() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_all_available: 'struct'" + " flag: '-A%{struct.foo}'" + " flag: '-B%{struct.bar}'" + "}", createStructureVariables( "struct", new Variables.StructureBuilder() .addField("foo", "fooValue") .addField("bar", "barValue")))) .containsExactly("-AfooValue", "-BbarValue"); } @Test public void testExpandIfAllAvailableWithStructsDoesntExpandIfMissing() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_all_available: 'nonexistent'" + " flag: '-A%{struct.foo}'" + " flag: '-B%{struct.bar}'" + "}", createStructureVariables( "struct", new Variables.StructureBuilder() .addField("foo", "fooValue") .addField("bar", "barValue")))) .isEmpty(); } @Test public void testExpandIfAllAvailableWithStructsDoesntCrashIfMissing() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_all_available: 'nonexistent'" + " flag: '-A%{nonexistent.foo}'" + " flag: '-B%{nonexistent.bar}'" + "}", createVariables())) .isEmpty(); } @Test public void testExpandIfAllAvailableWithStructFieldDoesntCrashIfMissing() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_all_available: 'nonexistent.nonexistant_field'" + " flag: '-A%{nonexistent.foo}'" + " flag: '-B%{nonexistent.bar}'" + "}", createVariables())) .isEmpty(); } @Test public void testExpandIfAllAvailableWithStructFieldExpandsIfPresent() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_all_available: 'struct.foo'" + " flag: '-A%{struct.foo}'" + " flag: '-B%{struct.bar}'" + "}", createStructureVariables( "struct", new Variables.StructureBuilder() .addField("foo", "fooValue") .addField("bar", "barValue")))) 
.containsExactly("-AfooValue", "-BbarValue"); } @Test public void testExpandIfAllAvailableWithStructFieldDoesntExpandIfMissing() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_all_available: 'struct.foo'" + " flag: '-A%{struct.foo}'" + " flag: '-B%{struct.bar}'" + "}", createStructureVariables( "struct", new Variables.StructureBuilder().addField("bar", "barValue")))) .isEmpty(); } @Test public void testExpandIfAllAvailableWithStructFieldScopesRight() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " flag_group {" + " expand_if_all_available: 'struct.foo'" + " flag: '-A%{struct.foo}'" + " }" + " flag_group { " + " flag: '-B%{struct.bar}'" + " }" + "}", createStructureVariables( "struct", new Variables.StructureBuilder().addField("bar", "barValue")))) .containsExactly("-BbarValue"); } @Test public void testExpandIfNoneAvailableExpandsIfNotAvailable() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " flag_group {" + " expand_if_none_available: 'not_available'" + " flag: '-foo'" + " }" + " flag_group { " + " expand_if_none_available: 'available'" + " flag: '-bar'" + " }" + "}", createVariables("available", "available"))) .containsExactly("-foo"); } @Test public void testExpandIfNoneAvailableDoesntExpandIfThereIsOneOfManyAvailable() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " flag_group {" + " expand_if_none_available: 'not_available'" + " expand_if_none_available: 'available'" + " flag: '-foo'" + " }" + "}", createVariables("available", "available"))) .isEmpty(); } @Test public void testExpandIfTrueDoesntExpandIfMissing() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_true: 'missing'" + " flag: '-A%{missing}'" + "}" + "flag_group {" + " expand_if_false: 'missing'" + " flag: '-B%{missing}'" + "}", createVariables())) .isEmpty(); } @Test public void testExpandIfTrueExpandsIfOne() throws Exception 
{ assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_true: 'struct.bool'" + " flag: '-A%{struct.foo}'" + " flag: '-B%{struct.bar}'" + "}" + "flag_group {" + " expand_if_false: 'struct.bool'" + " flag: '-X%{struct.foo}'" + " flag: '-Y%{struct.bar}'" + "}", createStructureVariables( "struct", new Variables.StructureBuilder() .addField("bool", new IntegerValue(1)) .addField("foo", "fooValue") .addField("bar", "barValue")))) .containsExactly("-AfooValue", "-BbarValue"); } @Test public void testExpandIfTrueExpandsIfZero() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_true: 'struct.bool'" + " flag: '-A%{struct.foo}'" + " flag: '-B%{struct.bar}'" + "}" + "flag_group {" + " expand_if_false: 'struct.bool'" + " flag: '-X%{struct.foo}'" + " flag: '-Y%{struct.bar}'" + "}", createStructureVariables( "struct", new Variables.StructureBuilder() .addField("bool", new IntegerValue(0)) .addField("foo", "fooValue") .addField("bar", "barValue")))) .containsExactly("-XfooValue", "-YbarValue"); } @Test public void testExpandIfEqual() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " expand_if_equal: { variable: 'var' value: 'equal_value' }" + " flag: '-foo_%{var}'" + "}" + "flag_group {" + " expand_if_equal: { variable: 'var' value: 'non_equal_value' }" + " flag: '-bar_%{var}'" + "}" + "flag_group {" + " expand_if_equal: { variable: 'non_existing_var' value: 'non_existing' }" + " flag: '-baz_%{non_existing_var}'" + "}", createVariables("var", "equal_value"))) .containsExactly("-foo_equal_value"); } @Test public void testListVariableExpansion() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group { iterate_over: 'v' flag: '%{v}' }", createVariables("v", "1", "v", "2"))) .containsExactly("1", "2"); } @Test public void testListVariableExpansionMixedWithNonListVariable() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group { iterate_over: 'v1' flag: '%{v1} %{v2}' 
}", createVariables("v1", "a1", "v1", "a2", "v2", "b"))) .containsExactly("a1 b", "a2 b"); } @Test public void testNestedListVariableExpansion() throws Exception { assertThat( getCommandLineForFlagGroups( "flag_group {" + " iterate_over: 'v1'" + " flag_group {" + " iterate_over: 'v2'" + " flag: '%{v1} %{v2}'" + " }" + "}", createVariables("v1", "a1", "v1", "a2", "v2", "b1", "v2", "b2"))) .containsExactly("a1 b1", "a1 b2", "a2 b1", "a2 b2"); } @Test public void testListVariableExpansionMixedWithImplicitlyAccessedListVariableFails() throws Exception { assertThat( getFlagGroupsExpansionError( "flag_group { iterate_over: 'v1' flag: '%{v1} %{v2}' }", createVariables("v1", "a1", "v1", "a2", "v2", "b1", "v2", "b2"))) .contains("Cannot expand variable 'v2': expected string, found sequence"); } @Test public void testFlagGroupVariableExpansion() throws Exception { assertThat( getCommandLineForFlagGroups( "" + "flag_group { iterate_over: 'v' flag: '-f' flag: '%{v}' }" + "flag_group { flag: '-end' }", createVariables("v", "1", "v", "2"))) .containsExactly("-f", "1", "-f", "2", "-end"); assertThat( getCommandLineForFlagGroups( "" + "flag_group { iterate_over: 'v' flag: '-f' flag: '%{v}' }" + "flag_group { iterate_over: 'v' flag: '%{v}' }", createVariables("v", "1", "v", "2"))) .containsExactly("-f", "1", "-f", "2", "1", "2"); assertThat( getCommandLineForFlagGroups( "" + "flag_group { iterate_over: 'v' flag: '-f' flag: '%{v}' } " + "flag_group { iterate_over: 'v' flag: '%{v}' }", createVariables("v", "1", "v", "2"))) .containsExactly("-f", "1", "-f", "2", "1", "2"); } private VariableValueBuilder createNestedSequence(int depth, int count, String prefix) { if (depth == 0) { StringSequenceBuilder builder = new StringSequenceBuilder(); for (int i = 0; i < count; ++i) { String value = prefix + i; builder.addValue(value); } return builder; } else { SequenceBuilder builder = new SequenceBuilder(); for (int i = 0; i < count; ++i) { String value = prefix + i; 
builder.addValue(createNestedSequence(depth - 1, count, value)); } return builder; } } private Variables createNestedVariables(String name, int depth, int count) { return new Variables.Builder() .addCustomBuiltVariable(name, createNestedSequence(depth, count, "")) .build(); } @Test public void testFlagTreeVariableExpansion() throws Exception { String nestedGroup = "" + "flag_group {" + " iterate_over: 'v'" + " flag_group { flag: '-a' }" + " flag_group { iterate_over: 'v' flag: '%{v}' }" + " flag_group { flag: '-b' }" + "}"; assertThat(getCommandLineForFlagGroups(nestedGroup, createNestedVariables("v", 1, 3))) .containsExactly( "-a", "00", "01", "02", "-b", "-a", "10", "11", "12", "-b", "-a", "20", "21", "22", "-b"); try { getCommandLineForFlagGroups(nestedGroup, createNestedVariables("v", 2, 3)); fail("Expected ExpansionException"); } catch (ExpansionException e) { assertThat(e).hasMessageThat().contains("'v'"); } try { buildFeatures( "feature {", " name: 'a'", " flag_set {", " action: 'c++-compile'", " flag_group {", " flag_group { flag: '-f' }", " flag: '-f'", " }", " }", "}"); fail("Expected ExpansionException"); } catch (ExpansionException e) { assertThat(e).hasMessageThat().contains("Invalid toolchain configuration"); } } @Test public void testImplies() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' implies: 'b' implies: 'c' }", "feature { name: 'b' }", "feature { name: 'c' implies: 'd' }", "feature { name: 'd' }", "feature { name: 'e' }"); assertThat(getEnabledFeatures(features, "a")).containsExactly("a", "b", "c", "d"); } @Test public void testRequires() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' requires: { feature: 'b' } }", "feature { name: 'b' requires: { feature: 'c' } }", "feature { name: 'c' }"); assertThat(getEnabledFeatures(features, "a")).isEmpty(); assertThat(getEnabledFeatures(features, "a", "b")).isEmpty(); assertThat(getEnabledFeatures(features, "a", 
"c")).containsExactly("c"); assertThat(getEnabledFeatures(features, "a", "b", "c")).containsExactly("a", "b", "c"); } @Test public void testDisabledRequirementChain() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' }", "feature { name: 'b' requires: { feature: 'c' } implies: 'a' }", "feature { name: 'c' }"); assertThat(getEnabledFeatures(features, "b")).isEmpty(); features = buildFeatures( "feature { name: 'a' }", "feature { name: 'b' requires: { feature: 'a' } implies: 'c' }", "feature { name: 'c' }", "feature { name: 'd' requires: { feature: 'c' } implies: 'e' }", "feature { name: 'e' }"); assertThat(getEnabledFeatures(features, "b", "d")).isEmpty(); } @Test public void testEnabledRequirementChain() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: '0' implies: 'a' }", "feature { name: 'a' }", "feature { name: 'b' requires: { feature: 'a' } implies: 'c' }", "feature { name: 'c' }", "feature { name: 'd' requires: { feature: 'c' } implies: 'e' }", "feature { name: 'e' }"); assertThat(getEnabledFeatures(features, "0", "b", "d")).containsExactly( "0", "a", "b", "c", "d", "e"); } @Test public void testLogicInRequirements() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' requires: { feature: 'b' feature: 'c' } requires: { feature: 'd' } }", "feature { name: 'b' }", "feature { name: 'c' }", "feature { name: 'd' }"); assertThat(getEnabledFeatures(features, "a", "b", "c")).containsExactly("a", "b", "c"); assertThat(getEnabledFeatures(features, "a", "b")).containsExactly("b"); assertThat(getEnabledFeatures(features, "a", "c")).containsExactly("c"); assertThat(getEnabledFeatures(features, "a", "d")).containsExactly("a", "d"); } @Test public void testImpliesImpliesRequires() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' implies: 'b' }", "feature { name: 'b' requires: { feature: 'c' } }", "feature { name: 'c' }"); 
assertThat(getEnabledFeatures(features, "a")).isEmpty(); } @Test public void testMultipleImplies() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' implies: 'b' implies: 'c' implies: 'd' }", "feature { name: 'b' }", "feature { name: 'c' requires: { feature: 'e' } }", "feature { name: 'd' }", "feature { name: 'e' }"); assertThat(getEnabledFeatures(features, "a")).isEmpty(); assertThat(getEnabledFeatures(features, "a", "e")).containsExactly("a", "b", "c", "d", "e"); } @Test public void testDisabledFeaturesDoNotEnableImplications() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' implies: 'b' requires: { feature: 'c' } }", "feature { name: 'b' }", "feature { name: 'c' }"); assertThat(getEnabledFeatures(features, "a")).isEmpty(); } @Test public void testFeatureNameCollision() throws Exception { try { buildFeatures( "feature { name: '<<<collision>>>' }", "feature { name: '<<<collision>>>' }"); fail("Expected InvalidConfigurationException"); } catch (InvalidConfigurationException e) { assertThat(e).hasMessageThat().contains("<<<collision>>>"); } } @Test public void testReferenceToUndefinedFeature() throws Exception { try { buildFeatures("feature { name: 'a' implies: '<<<undefined>>>' }"); fail("Expected InvalidConfigurationException"); } catch (InvalidConfigurationException e) { assertThat(e).hasMessageThat().contains("<<<undefined>>>"); } } @Test public void testImpliesWithCycle() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' implies: 'b' }", "feature { name: 'b' implies: 'a' }"); assertThat(getEnabledFeatures(features, "a")).containsExactly("a", "b"); assertThat(getEnabledFeatures(features, "b")).containsExactly("a", "b"); } @Test public void testMultipleImpliesCycle() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' implies: 'b' implies: 'c' implies: 'd' }", "feature { name: 'b' }", "feature { name: 'c' requires: { 
feature: 'e' } }", "feature { name: 'd' requires: { feature: 'f' } }", "feature { name: 'e' requires: { feature: 'c' } }", "feature { name: 'f' }"); assertThat(getEnabledFeatures(features, "a", "e")).isEmpty(); assertThat(getEnabledFeatures(features, "a", "e", "f")).containsExactly( "a", "b", "c", "d", "e", "f"); } @Test public void testRequiresWithCycle() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' requires: { feature: 'b' } }", "feature { name: 'b' requires: { feature: 'a' } }", "feature { name: 'c' implies: 'a' }", "feature { name: 'd' implies: 'b' }"); assertThat(getEnabledFeatures(features, "c")).isEmpty(); assertThat(getEnabledFeatures(features, "d")).isEmpty(); assertThat(getEnabledFeatures(features, "c", "d")).containsExactly("a", "b", "c", "d"); } @Test public void testImpliedByOneEnabledAndOneDisabledFeature() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' }", "feature { name: 'b' requires: { feature: 'a' } implies: 'd' }", "feature { name: 'c' implies: 'd' }", "feature { name: 'd' }"); assertThat(getEnabledFeatures(features, "b", "c")).containsExactly("c", "d"); } @Test public void testRequiresOneEnabledAndOneUnsupportedFeature() throws Exception { CcToolchainFeatures features = buildFeatures( "feature { name: 'a' requires: { feature: 'b' } requires: { feature: 'c' } }", "feature { name: 'b' }", "feature { name: 'c' requires: { feature: 'd' } }", "feature { name: 'd' }"); assertThat(getEnabledFeatures(features, "a", "b", "c")).containsExactly("a", "b"); } @Test public void testFlagSetWithMissingVariableIsNotExpanded() throws Exception { FeatureConfiguration configuration = buildFeatures( "feature {", " name: 'a'", " flag_set {", " action: 'c++-compile'", " expand_if_all_available: 'v'", " flag_group { flag: '%{v}' }", " }", " flag_set {", " action: 'c++-compile'", " flag_group { flag: 'unconditional' }", " }", "}") .getFeatureConfiguration(assumptionsFor("a")); 
// NOTE(review): this chunk is the tail of CcToolchainFeaturesTest. It opens mid-method:
// the first two statements and the closing brace belong to a test whose beginning is
// outside this view.
    assertThat(configuration.getCommandLine(CppCompileAction.CPP_COMPILE, createVariables()))
        .containsExactly("unconditional");
  }

  // A flag_set guarded by expand_if_all_available is emitted only when every listed
  // variable is bound; 'w' is never bound here, so the 'v'+'w' set is skipped.
  @Test
  public void testOnlyFlagSetsWithAllVariablesPresentAreExpanded() throws Exception {
    FeatureConfiguration configuration =
        buildFeatures(
                "feature {",
                " name: 'a'",
                " flag_set {",
                " action: 'c++-compile'",
                " expand_if_all_available: 'v'",
                " flag_group { flag: '%{v}' }",
                " }",
                " flag_set {",
                " action: 'c++-compile'",
                " expand_if_all_available: 'v'",
                " expand_if_all_available: 'w'",
                " flag_group { flag: '%{v}%{w}' }",
                " }",
                " flag_set {",
                " action: 'c++-compile'",
                " flag_group { flag: 'unconditional' }",
                " }",
                "}")
            .getFeatureConfiguration(assumptionsFor("a"));
    assertThat(
            configuration.getCommandLine(CppCompileAction.CPP_COMPILE, createVariables("v", "1")))
        .containsExactly("1", "unconditional");
  }

  // iterate_over expands the inner flag_group once per sequence element; the guarded
  // 'v'+'w' set is again skipped because 'w' is unbound.
  @Test
  public void testOnlyInnerFlagSetIsIteratedWithSequenceVariable() throws Exception {
    FeatureConfiguration configuration =
        buildFeatures(
                "feature {",
                " name: 'a'",
                " flag_set {",
                " action: 'c++-compile'",
                " expand_if_all_available: 'v'",
                " flag_group { iterate_over: 'v' flag: '%{v}' }",
                " }",
                " flag_set {",
                " action: 'c++-compile'",
                " expand_if_all_available: 'v'",
                " expand_if_all_available: 'w'",
                " flag_group { iterate_over: 'v' flag: '%{v}%{w}' }",
                " }",
                " flag_set {",
                " action: 'c++-compile'",
                " flag_group { flag: 'unconditional' }",
                " }",
                "}")
            .getFeatureConfiguration(assumptionsFor("a"));
    assertThat(
            configuration.getCommandLine(
                CppCompileAction.CPP_COMPILE, createVariables("v", "1", "v", "2")))
        .containsExactly("1", "2", "unconditional")
        .inOrder();
  }

  // With both 'v' and 'w' bound, each flag_set iterates over the sequence 'v'
  // independently, preserving per-set ordering in the final command line.
  @Test
  public void testFlagSetsAreIteratedIndividuallyForSequenceVariables() throws Exception {
    FeatureConfiguration configuration =
        buildFeatures(
                "feature {",
                " name: 'a'",
                " flag_set {",
                " action: 'c++-compile'",
                " expand_if_all_available: 'v'",
                " flag_group { iterate_over: 'v' flag: '%{v}' }",
                " }",
                " flag_set {",
                " action: 'c++-compile'",
                " expand_if_all_available: 'v'",
                " expand_if_all_available: 'w'",
                " flag_group { iterate_over: 'v' flag: '%{v}%{w}' }",
                " }",
                " flag_set {",
                " action: 'c++-compile'",
                " flag_group { flag: 'unconditional' }",
                " }",
                "}")
            .getFeatureConfiguration(assumptionsFor("a"));
    assertThat(
            configuration.getCommandLine(
                CppCompileAction.CPP_COMPILE, createVariables("v", "1", "v", "2", "w", "3")))
        .containsExactly("1", "2", "13", "23", "unconditional")
        .inOrder();
  }

  // Feature implication ('b' implies 'a') plus Java-serialization round-trip of the
  // whole CcToolchainFeatures object.
  @Test
  public void testConfiguration() throws Exception {
    CcToolchainFeatures features =
        buildFeatures(
            "feature {",
            " name: 'a'",
            " flag_set {",
            " action: 'c++-compile'",
            " flag_group {",
            " flag: '-f'",
            " flag: '%{v}'",
            " }",
            " }",
            "}",
            "feature { name: 'b' implies: 'a' }");
    assertThat(getEnabledFeatures(features, "b")).containsExactly("a", "b");
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("b"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables("v", "1")))
        .containsExactly("-f", "1");
    byte[] serialized = TestUtils.serializeObject(features);
    CcToolchainFeatures deserialized =
        (CcToolchainFeatures) TestUtils.deserializeObject(serialized);
    assertThat(getEnabledFeatures(deserialized, "b")).containsExactly("a", "b");
    // NOTE(review): this re-asserts on 'features', not 'deserialized' — presumably
    // intentional sanity check, but verify against upstream.
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("b"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables("v", "1")))
        .containsExactly("-f", "1");
  }

  // Only features marked 'enabled: true' are default-enabled.
  @Test
  public void testDefaultFeatures() throws Exception {
    CcToolchainFeatures features =
        buildFeatures("feature { name: 'a' }", "feature { name: 'b' enabled: true }");
    assertThat(features.getDefaultFeatures()).containsExactly("b");
  }

  // with_feature gates a flag_set on another feature being enabled.
  @Test
  public void testWithFeature_OneSetOneFeature() throws Exception {
    CcToolchainFeatures features =
        buildFeatures(
            "feature {",
            " name: 'a'",
            " flag_set {",
            " with_feature {feature: 'b'}",
            " action: 'c++-compile'",
            " flag_group {",
            " flag: 'dummy_flag'",
            " }",
            " }",
            "}",
            "feature {name: 'b'}");
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("a", "b"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables()))
        .containsExactly("dummy_flag");
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("a"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables()))
        .doesNotContain("dummy_flag");
  }

  // Multiple features inside one with_feature are conjunctive: all must be enabled.
  @Test
  public void testWithFeature_OneSetMultipleFeatures() throws Exception {
    CcToolchainFeatures features =
        buildFeatures(
            "feature {",
            " name: 'a'",
            " flag_set {",
            " with_feature {feature: 'b', feature: 'c'}",
            " action: 'c++-compile'",
            " flag_group {",
            " flag: 'dummy_flag'",
            " }",
            " }",
            "}",
            "feature {name: 'b'}",
            "feature {name: 'c'}");
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("a", "b", "c"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables()))
        .containsExactly("dummy_flag");
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("a", "b"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables()))
        .doesNotContain("dummy_flag");
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("a"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables()))
        .doesNotContain("dummy_flag");
  }

  // Multiple with_feature clauses are disjunctive: any fully-satisfied clause enables
  // the set. (Method name 'Muliple' is a pre-existing typo; kept — it is public API of
  // the test class for test runners.)
  @Test
  public void testWithFeature_MulipleSetsMultipleFeatures() throws Exception {
    CcToolchainFeatures features =
        buildFeatures(
            "feature {",
            " name: 'a'",
            " flag_set {",
            " with_feature {feature: 'b1', feature: 'c1'}",
            " with_feature {feature: 'b2', feature: 'c2'}",
            " action: 'c++-compile'",
            " flag_group {",
            " flag: 'dummy_flag'",
            " }",
            " }",
            "}",
            "feature {name: 'b1'}",
            "feature {name: 'c1'}",
            "feature {name: 'b2'}",
            "feature {name: 'c2'}");
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("a", "b1", "c1", "b2", "c2"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables()))
        .containsExactly("dummy_flag");
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("a", "b1", "c1"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables()))
        .containsExactly("dummy_flag");
    assertThat(
            features
                .getFeatureConfiguration(assumptionsFor("a", "b1", "b2"))
                .getCommandLine(CppCompileAction.CPP_COMPILE, createVariables()))
        .doesNotContain("dummy_flag");
  }

  // A feature can imply an action_config, activating that action.
  @Test
  public void testActivateActionConfigFromFeature() throws Exception {
    CcToolchainFeatures toolchainFeatures =
        buildFeatures(
            "action_config {",
            " config_name: 'action-a'",
            " action_name: 'action-a'",
            " tool {",
            " tool_path: 'toolchain/feature-a'",
            " with_feature: { feature: 'feature-a' }",
            " }",
            "}",
            "feature {",
            " name: 'activates-action-a'",
            " implies: 'action-a'",
            "}");
    FeatureConfiguration featureConfiguration =
        toolchainFeatures.getFeatureConfiguration(assumptionsFor("activates-action-a"));
    assertThat(featureConfiguration.actionIsConfigured("action-a")).isTrue();
  }

  // A feature's 'requires' clause may name an action_config; unmet requirement
  // disables the feature.
  @Test
  public void testFeatureCanRequireActionConfig() throws Exception {
    CcToolchainFeatures toolchainFeatures =
        buildFeatures(
            "action_config {",
            " config_name: 'action-a'",
            " action_name: 'action-a'",
            " tool {",
            " tool_path: 'toolchain/feature-a'",
            " with_feature: { feature: 'feature-a' }",
            " }",
            "}",
            "feature {",
            " name: 'requires-action-a'",
            " requires: { feature: 'action-a' }",
            "}");
    FeatureConfiguration featureConfigurationWithoutAction =
        toolchainFeatures.getFeatureConfiguration(assumptionsFor("requires-action-a"));
    assertThat(featureConfigurationWithoutAction.isEnabled("requires-action-a")).isFalse();
    FeatureConfiguration featureConfigurationWithAction =
        toolchainFeatures.getFeatureConfiguration(assumptionsFor("action-a", "requires-action-a"));
    assertThat(featureConfigurationWithAction.isEnabled("requires-action-a")).isTrue();
  }

  // Tool path is resolved relative to the crosstool directory.
  @Test
  public void testSimpleActionTool() throws Exception {
    FeatureConfiguration configuration =
        buildFeatures(
                "action_config {",
                " config_name: 'action-a'",
                " action_name: 'action-a'",
                " tool {",
                " tool_path: 'toolchain/a'",
                " }",
                "}",
                "feature {",
                " name: 'activates-action-a'",
                " implies: 'action-a'",
                "}")
            .getFeatureConfiguration(assumptionsFor("activates-action-a"));
    PathFragment crosstoolPath = PathFragment.create("crosstool/");
    PathFragment toolPath = configuration.getToolForAction("action-a").getToolPath(crosstoolPath);
    assertThat(toolPath.toString()).isEqualTo("crosstool/toolchain/a");
  }

  // Tool selection: first tool whose with_feature set is fully satisfied wins;
  // a featureless tool acts as the fallback default.
  @Test
  public void testActionToolFromFeatureSet() throws Exception {
    CcToolchainFeatures toolchainFeatures =
        buildFeatures(
            "action_config {",
            " config_name: 'action-a'",
            " action_name: 'action-a'",
            " tool {",
            " tool_path: 'toolchain/features-a-and-b'",
            " with_feature: {",
            " feature: 'feature-a'",
            " feature: 'feature-b'",
            " }",
            " }",
            " tool {",
            " tool_path: 'toolchain/feature-a'",
            " with_feature: { feature: 'feature-a' }",
            " }",
            " tool {",
            " tool_path: 'toolchain/feature-b'",
            " with_feature: { feature: 'feature-b' }",
            " }",
            " tool {",
            " tool_path: 'toolchain/default'",
            " }",
            "}",
            "feature {",
            " name: 'feature-a'",
            "}",
            "feature {",
            " name: 'feature-b'",
            "}",
            "feature {",
            " name: 'activates-action-a'",
            " implies: 'action-a'",
            "}");
    PathFragment crosstoolPath = PathFragment.create("crosstool/");
    FeatureConfiguration featureAConfiguration =
        toolchainFeatures.getFeatureConfiguration(
            assumptionsFor("feature-a", "activates-action-a"));
    assertThat(
            featureAConfiguration
                .getToolForAction("action-a")
                .getToolPath(crosstoolPath)
                .toString())
        .isEqualTo("crosstool/toolchain/feature-a");
    FeatureConfiguration featureBConfiguration =
        toolchainFeatures.getFeatureConfiguration(
            assumptionsFor("feature-b", "activates-action-a"));
    assertThat(
            featureBConfiguration
                .getToolForAction("action-a")
                .getToolPath(crosstoolPath)
                .toString())
        .isEqualTo("crosstool/toolchain/feature-b");
    FeatureConfiguration featureAAndBConfiguration =
        toolchainFeatures.getFeatureConfiguration(
            assumptionsFor("feature-a", "feature-b", "activates-action-a"));
    assertThat(
            featureAAndBConfiguration
                .getToolForAction("action-a")
                .getToolPath(crosstoolPath)
                .toString())
        .isEqualTo("crosstool/toolchain/features-a-and-b");
    FeatureConfiguration noFeaturesConfiguration =
        toolchainFeatures.getFeatureConfiguration(assumptionsFor("activates-action-a"));
    assertThat(
            noFeaturesConfiguration
                .getToolForAction("action-a")
                .getToolPath(crosstoolPath)
                .toString())
        .isEqualTo("crosstool/toolchain/default");
  }

  // No satisfied tool and no default → IllegalArgumentException with a specific message.
  @Test
  public void testErrorForNoMatchingTool() throws Exception {
    CcToolchainFeatures toolchainFeatures =
        buildFeatures(
            "action_config {",
            " config_name: 'action-a'",
            " action_name: 'action-a'",
            " tool {",
            " tool_path: 'toolchain/feature-a'",
            " with_feature: { feature: 'feature-a' }",
            " }",
            "}",
            "feature {",
            " name: 'feature-a'",
            "}",
            "feature {",
            " name: 'activates-action-a'",
            " implies: 'action-a'",
            "}");
    PathFragment crosstoolPath = PathFragment.create("crosstool/");
    FeatureConfiguration noFeaturesConfiguration =
        toolchainFeatures.getFeatureConfiguration(assumptionsFor("activates-action-a"));
    try {
      noFeaturesConfiguration.getToolForAction("action-a").getToolPath(crosstoolPath);
      fail("Expected IllegalArgumentException");
    } catch (IllegalArgumentException e) {
      assertThat(e)
          .hasMessageThat()
          .contains("Matching tool for action action-a not found for given feature configuration");
    }
  }

  // An action_config can be requested directly by its name.
  @Test
  public void testActivateActionConfigDirectly() throws Exception {
    CcToolchainFeatures toolchainFeatures =
        buildFeatures(
            "action_config {",
            " config_name: 'action-a'",
            " action_name: 'action-a'",
            " tool {",
            " tool_path: 'toolchain/feature-a'",
            " with_feature: { feature: 'feature-a' }",
            " }",
            "}");
    FeatureConfiguration featureConfiguration =
        toolchainFeatures.getFeatureConfiguration(assumptionsFor("action-a"));
    assertThat(featureConfiguration.actionIsConfigured("action-a")).isTrue();
  }

  // action_config 'implies' works in the other direction too: it can activate a feature.
  @Test
  public void testActionConfigCanActivateFeature() throws Exception {
    CcToolchainFeatures toolchainFeatures =
        buildFeatures(
            "action_config {",
            " config_name: 'action-a'",
            " action_name: 'action-a'",
            " tool {",
            " tool_path: 'toolchain/feature-a'",
            " with_feature: { feature: 'feature-a' }",
            " }",
            " implies: 'activated-feature'",
            "}",
            "feature {",
            " name: 'activated-feature'",
            "}");
    FeatureConfiguration featureConfiguration =
        toolchainFeatures.getFeatureConfiguration(assumptionsFor("action-a"));
    assertThat(featureConfiguration.isEnabled("activated-feature")).isTrue();
  }

  // Duplicate config_name across action_configs is a configuration error.
  @Test
  public void testInvalidActionConfigurationDuplicateActionConfigs() throws Exception {
    try {
      buildFeatures(
          "action_config {",
          " config_name: 'action-a'",
          " action_name: 'action-1'",
          "}",
          "action_config {",
          " config_name: 'action-a'",
          " action_name: 'action-2'",
          "}");
      fail("Expected InvalidConfigurationException");
    } catch (InvalidConfigurationException e) {
      assertThat(e)
          .hasMessageThat()
          .contains("feature or action config 'action-a' was specified multiple times.");
    }
  }

  // Two action_configs targeting the same action_name is also an error.
  @Test
  public void testInvalidActionConfigurationMultipleActionConfigsForAction() throws Exception {
    try {
      buildFeatures(
          "action_config {",
          " config_name: 'name-a'",
          " action_name: 'action-a'",
          "}",
          "action_config {",
          " config_name: 'name-b'",
          " action_name: 'action-a'",
          "}");
      fail("Expected InvalidConfigurationException");
    } catch (InvalidConfigurationException e) {
      assertThat(e).hasMessageThat().contains("multiple action configs for action 'action-a'");
    }
  }

  // A flag_set declared inside an action_config contributes flags to that action.
  @Test
  public void testFlagsFromActionConfig() throws Exception {
    FeatureConfiguration featureConfiguration =
        buildFeatures(
                "action_config {",
                " config_name: 'c++-compile'",
                " action_name: 'c++-compile'",
                " flag_set {",
                " flag_group {flag: 'foo'}",
                " }",
                "}")
            .getFeatureConfiguration(assumptionsFor("c++-compile"));
    List<String> commandLine =
        featureConfiguration.getCommandLine("c++-compile", createVariables());
    assertThat(commandLine).contains("foo");
  }

  // An action_config's flag_set must not name an action explicitly — that is rejected.
  @Test
  public void testErrorForFlagFromActionConfigWithSpecifiedAction() throws Exception {
    try {
      buildFeatures(
              "action_config {",
              " config_name: 'c++-compile'",
              " action_name: 'c++-compile'",
              " flag_set {",
              " action: 'c++-compile'",
              " flag_group {flag: 'foo'}",
              " }",
              "}")
          .getFeatureConfiguration(assumptionsFor("c++-compile"));
      fail("Should throw InvalidConfigurationException");
    } catch (InvalidConfigurationException e) {
      assertThat(e)
          .hasMessageThat()
          .contains(String.format(ActionConfig.FLAG_SET_WITH_ACTION_ERROR, "c++-compile"));
    }
  }

  // LibraryToLinkValue exposes either a 'name' field (dynamic library) or an
  // 'object_files' sequence (object-file group), never both.
  @Test
  public void testLibraryToLinkValue() {
    assertThat(
            LibraryToLinkValue.forDynamicLibrary("foo")
                .getFieldValue("LibraryToLinkValue", LibraryToLinkValue.NAME_FIELD_NAME)
                .getStringValue(LibraryToLinkValue.NAME_FIELD_NAME))
        .isEqualTo("foo");
    assertThat(
            LibraryToLinkValue.forDynamicLibrary("foo")
                .getFieldValue("LibraryToLinkValue", LibraryToLinkValue.OBJECT_FILES_FIELD_NAME))
        .isNull();
    assertThat(
            LibraryToLinkValue.forObjectFileGroup(ImmutableList.of("foo", "bar"), false)
                .getFieldValue("LibraryToLinkValue", LibraryToLinkValue.NAME_FIELD_NAME))
        .isNull();
    Iterable<? extends VariableValue> objects =
        LibraryToLinkValue.forObjectFileGroup(ImmutableList.of("foo", "bar"), false)
            .getFieldValue("LibraryToLinkValue", LibraryToLinkValue.OBJECT_FILES_FIELD_NAME)
            .getSequenceValue(LibraryToLinkValue.OBJECT_FILES_FIELD_NAME);
    ImmutableList.Builder<String> objectNames = ImmutableList.builder();
    for (VariableValue object : objects) {
      objectNames.add(object.getStringValue("name"));
    }
    assertThat(objectNames.build()).containsExactly("foo", "bar");
  }

  // Two enabled features providing the same 'provides' token collide; the error
  // message names both features.
  @Test
  public void testProvidesCollision() throws Exception {
    try {
      buildFeatures(
              "feature {",
              " name: 'a'",
              " provides: 'provides_string'",
              "}",
              "feature {",
              " name: 'b'",
              " provides: 'provides_string'",
              "}")
          .getFeatureConfiguration(
              FeatureSpecification.create(ImmutableSet.of("a", "b"), ImmutableSet.<String>of()));
      fail("Should throw CollidingProvidesException on collision, instead did not throw.");
    } catch (Exception e) {
      assertThat(e).hasMessageThat().contains("a b");
    }
  }
}
// Copyright 2006-2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;

import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.EventCollector;
import com.google.devtools.build.lib.events.EventKind;
import com.google.devtools.build.lib.events.Reporter;
import com.google.devtools.build.lib.events.util.EventCollectionApparatus;
import com.google.devtools.build.lib.packages.CachingPackageLocator;
import com.google.devtools.build.lib.packages.PackageIdentifier;
import com.google.devtools.build.lib.testutil.MoreAsserts;
import com.google.devtools.build.lib.testutil.Scratch;
import com.google.devtools.build.lib.vfs.Path;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import java.io.IOException;
import java.util.Arrays;

/**
 * Tests for parsing and evaluating BUILD-file ASTs: error reporting (message, file,
 * line, column), include/subinclude handling, and post-exec environment state.
 */
@RunWith(JUnit4.class)
public class BuildFileASTTest {

  // In-memory scratch filesystem for fabricating BUILD files per test.
  private Scratch scratch = new Scratch();

  // Collects every event so tests can assert on exact diagnostics.
  private EventCollectionApparatus events = new EventCollectionApparatus(EventKind.ALL_EVENTS);

  // Resolves a package name to "<package>/BUILD" on the scratch filesystem.
  private class ScratchPathPackageLocator implements CachingPackageLocator {
    @Override
    public Path getBuildFileForPackage(PackageIdentifier packageName) {
      return scratch.resolve(packageName.getPackageFragment()).getRelative("BUILD");
    }
  }

  private CachingPackageLocator locator = new ScratchPathPackageLocator();

  /**
   * Parses the contents of the specified string (using DUMMY_PATH as the fake
   * filename) and returns the AST. Resets the error handler beforehand.
   */
  private BuildFileAST parseBuildFile(String... lines) throws IOException {
    Path file = scratch.file("/a/build/file/BUILD", lines);
    return BuildFileAST.parseBuildFile(file, events.reporter(), locator, false);
  }

  // Happy path: parse + exec succeed and the environment holds the evaluated value.
  @Test
  public void testParseBuildFileOK() throws Exception {
    Path buildFile = scratch.file("/BUILD",
        "# a file in the build language",
        "",
        "x = [1,2,'foo',4] + [1,2, \"%s%d\" % ('foo', 1)]");

    Environment env = new Environment();
    Reporter reporter = new Reporter();
    BuildFileAST buildfile = BuildFileAST.parseBuildFile(buildFile, reporter, null, false);

    assertTrue(buildfile.exec(env, reporter));

    // Test final environment is correctly modified:
    //
    // input1.BUILD contains:
    // x = [1,2,'foo',4] + [1,2, "%s%d" % ('foo', 1)]
    assertEquals(Arrays.<Object>asList(1, 2, "foo", 4, 1, 2, "foo1"),
                 env.lookup("x"));
  }

  // A runtime type error during exec is reported with the correct source line.
  @Test
  public void testEvalException() throws Exception {
    Path buildFile = scratch.file("/input1.BUILD",
        "x = 1",
        "y = [2,3]",
        "",
        "z = x + y");

    Environment env = new Environment();
    Reporter reporter = new Reporter();
    EventCollector collector = new EventCollector(EventKind.ALL_EVENTS);
    reporter.addHandler(collector);
    BuildFileAST buildfile = BuildFileAST.parseBuildFile(buildFile, reporter, null, false);

    assertFalse(buildfile.exec(env, reporter));
    Event e = MoreAsserts.assertContainsEvent(collector,
        "unsupported operand type(s) for +: 'int' and 'List'");
    assertEquals(4, e.getLocation().getStartLineAndColumn().getLine());
  }

  // Newline-separated statements each become one AST statement.
  @Test
  public void testParsesFineWithNewlines() throws Exception {
    BuildFileAST buildFileAST = parseBuildFile("foo()\n"
                                               + "bar()\n"
                                               + "something = baz()\n"
                                               + "bar()");
    assertThat(buildFileAST.getStatements()).hasSize(4);
  }

  // Statements on one line without separators → syntax error at the exact spot.
  @Test
  public void testFailsIfNewlinesAreMissing() throws Exception {
    events.setFailFast(false);

    BuildFileAST buildFileAST =
      parseBuildFile("foo() bar() something = baz() bar()");

    Event event = events.collector().iterator().next();
    assertEquals("syntax error at \'bar\': expected newline", event.getMessage());
    assertEquals("/a/build/file/BUILD", event.getLocation().getPath().toString());
    assertEquals(1, event.getLocation().getStartLineAndColumn().getLine());
    assertTrue(buildFileAST.containsErrors());
  }

  // Python-style implicit string concatenation is forbidden in BUILD files.
  @Test
  public void testImplicitStringConcatenationFails() throws Exception {
    events.setFailFast(false);
    BuildFileAST buildFileAST = parseBuildFile("a = 'foo' 'bar'");
    Event event = events.collector().iterator().next();
    assertEquals("Implicit string concatenation is forbidden, use the + operator",
        event.getMessage());
    assertEquals("/a/build/file/BUILD", event.getLocation().getPath().toString());
    assertEquals(1, event.getLocation().getStartLineAndColumn().getLine());
    assertEquals(10, event.getLocation().getStartLineAndColumn().getColumn());
    assertTrue(buildFileAST.containsErrors());
  }

  // An indented continuation string on the next line is an indentation error.
  @Test
  public void testImplicitStringConcatenationAcrossLinesIsIllegal() throws Exception {
    events.setFailFast(false);
    BuildFileAST buildFileAST = parseBuildFile("a = 'foo'\n  'bar'");

    Event event = events.collector().iterator().next();
    assertEquals("indentation error", event.getMessage());
    assertEquals("/a/build/file/BUILD", event.getLocation().getPath().toString());
    assertEquals(2, event.getLocation().getStartLineAndColumn().getLine());
    assertEquals(2, event.getLocation().getStartLineAndColumn().getColumn());
    assertTrue(buildFileAST.containsErrors());
  }

  /**
   * If the specified EventCollector does contain an event which has
   * 'expectedEvent' as a substring, the matching event is
   * returned. Otherwise this will return null.
   */
  public static Event findEvent(EventCollector eventCollector, String expectedEvent) {
    for (Event event : eventCollector) {
      if (event.getMessage().contains(expectedEvent)) {
        return event;
      }
    }
    return null;
  }

  // Internal "$error$" marker must never leak into user-visible diagnostics.
  @Test
  public void testWithSyntaxErrorsDoesNotPrintDollarError() throws Exception {
    events.setFailFast(false);
    BuildFileAST buildFile = parseBuildFile(
        "abi = cxx_abi + '-glibc-' + glibc_version + '-' + generic_cpu + '-' + sysname",
        "libs = [abi + opt_level + '/lib/libcc.a']",
        "shlibs = [abi + opt_level + '/lib/libcc.so']",
        "+* shlibs", // syntax error at '+'
        "cc_library(name = 'cc',",
        "           srcs = libs,",
        "           includes = [ abi + opt_level + '/include' ])");
    assertTrue(buildFile.containsErrors());
    Event event = events.collector().iterator().next();
    assertEquals("syntax error at '+': expected expression", event.getMessage());
    Environment env = new Environment();
    assertFalse(buildFile.exec(env, events.reporter()));
    assertNull(findEvent(events.collector(), "$error$"));
    // This message should not be printed anymore.
    Event event2 = findEvent(events.collector(), "contains syntax error(s)");
    assertNull(event2);
  }

  // include() splices the included file's statements into the including AST.
  @Test
  public void testInclude() throws Exception {
    scratch.file("/foo/bar/BUILD",
        "c = 4\n"
        + "d = 5\n");
    Path buildFile = scratch.file("/BUILD",
        "a = 2\n"
        + "include(\"//foo/bar:BUILD\")\n"
        + "b = 4\n");

    BuildFileAST buildFileAST = BuildFileAST.parseBuildFile(buildFile, events.reporter(),
                                                            locator, false);

    assertFalse(buildFileAST.containsErrors());
    assertThat(buildFileAST.getStatements()).hasSize(5);
  }

  // Included definitions are visible to later statements; exec still returns false
  // here (the include statement itself is not executable).
  @Test
  public void testInclude2() throws Exception {
    scratch.file("/foo/bar/defs",
        "a = 1\n");
    Path buildFile = scratch.file("/BUILD",
        "include(\"//foo/bar:defs\")\n"
        + "b = a + 1\n");

    BuildFileAST buildFileAST = BuildFileAST.parseBuildFile(buildFile, events.reporter(),
                                                            locator, false);

    assertFalse(buildFileAST.containsErrors());
    assertThat(buildFileAST.getStatements()).hasSize(3);

    Environment env = new Environment();
    Reporter reporter = new Reporter();
    assertFalse(buildFileAST.exec(env, reporter));
    assertEquals(2, env.lookup("b"));
  }

  // A file included twice is spliced (and executed) twice — note fileD runs twice,
  // so b ends up 5 and c (computed after the first inclusion) is 7.
  @Test
  public void testMultipleIncludes() throws Exception {
    String fileA =
        "include(\"//foo:fileB\")\n"
        + "include(\"//foo:fileC\")\n";
    scratch.file("/foo/fileB",
        "b = 3\n"
        + "include(\"//foo:fileD\")\n");
    scratch.file("/foo/fileC",
        "include(\"//foo:fileD\")\n"
        + "c = b + 2\n");
    scratch.file("/foo/fileD",
        "b = b + 1\n"); // this code is included twice

    BuildFileAST buildFileAST = parseBuildFile(fileA);
    assertFalse(buildFileAST.containsErrors());
    assertThat(buildFileAST.getStatements()).hasSize(8);

    Environment env = new Environment();
    Reporter reporter = new Reporter();
    assertFalse(buildFileAST.exec(env, reporter));
    assertEquals(5, env.lookup("b"));
    assertEquals(7, env.lookup("c"));
  }

  // include() of a nonexistent label reports a failure event but keeps the statement.
  @Test
  public void testFailInclude() throws Exception {
    events.setFailFast(false);
    BuildFileAST buildFileAST = parseBuildFile("include(\"//nonexistent\")");
    assertThat(buildFileAST.getStatements()).hasSize(1);
    events.assertContainsEvent("Include of '//nonexistent' failed");
  }

  // Locator that knows no packages at all — used to exercise "package not found".
  private class EmptyPackageLocator implements CachingPackageLocator {
    @Override
    public Path getBuildFileForPackage(PackageIdentifier packageName) {
      return null;
    }
  }

  private CachingPackageLocator emptyLocator = new EmptyPackageLocator();

  // When the locator cannot resolve the package, a "not found" event is reported.
  @Test
  public void testFailInclude2() throws Exception {
    events.setFailFast(false);
    Path buildFile = scratch.file("/foo/bar/BUILD",
        "include(\"//nonexistent:foo\")\n");
    BuildFileAST buildFileAST = BuildFileAST.parseBuildFile(buildFile, events.reporter(),
                                                            emptyLocator, false);
    assertThat(buildFileAST.getStatements()).hasSize(1);
    events.assertContainsEvent("Package 'nonexistent' not found");
  }

  // include() with a non-string argument is a syntax error and yields no statements.
  @Test
  public void testInvalidInclude() throws Exception {
    events.setFailFast(false);
    BuildFileAST buildFileAST = parseBuildFile("include(2)");
    assertThat(buildFileAST.getStatements()).isEmpty();
    events.assertContainsEvent("syntax error at '2'");
  }

  // A file including itself is detected and reported rather than looping forever.
  @Test
  public void testRecursiveInclude() throws Exception {
    events.setFailFast(false);
    Path buildFile = scratch.file("/foo/bar/BUILD",
        "include(\"//foo/bar:BUILD\")\n");

    BuildFileAST.parseBuildFile(buildFile, events.reporter(), locator, false);
    events.assertContainsEvent("Recursive inclusion");
  }

  // Parse errors inside an included file are attributed to the included file's location.
  @Test
  public void testParseErrorInclude() throws Exception {
    events.setFailFast(false);

    scratch.file("/foo/bar/file",
        "a = 2 + % 3\n"); // parse error

    parseBuildFile("include(\"//foo/bar:file\")");

    // Check the location is properly reported
    Event event = events.collector().iterator().next();
    assertEquals("/foo/bar/file:1:9", event.getLocation().print());
    assertEquals("syntax error at '%': expected expression", event.getMessage());
  }

  // A missing include target still leaves the include statement in the AST.
  @Test
  public void testNonExistentIncludeReported() throws Exception {
    events.setFailFast(false);
    BuildFileAST buildFileAST = parseBuildFile("include('//foo:bar')");
    assertThat(buildFileAST.getStatements()).hasSize(1);
  }

  // Only direct string-literal subinclude() calls are collected; nested expressions
  // and variable arguments are ignored.
  @Test
  public void testFetchSubincludes() throws Exception {
    BuildFileAST buildFileAST = parseBuildFile(
        "foo('a')\n",
        "subinclude()\n",
        "subinclude('hello')\n",
        "1 + subinclude('ignored')",
        "var = 'also ignored'",
        "subinclude(var)\n",
        "subinclude('world')");
    assertThat(buildFileAST.getSubincludes()).containsExactly("hello", "world");
  }
}
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.java.decompiler.modules.decompiler.sforms;

import org.jetbrains.java.decompiler.modules.decompiler.StatEdge;
import org.jetbrains.java.decompiler.modules.decompiler.StatEdge.EdgeType;
import org.jetbrains.java.decompiler.modules.decompiler.exps.Exprent;
import org.jetbrains.java.decompiler.modules.decompiler.sforms.DirectNode.DirectNodeType;
import org.jetbrains.java.decompiler.modules.decompiler.stats.*;
import org.jetbrains.java.decompiler.modules.decompiler.stats.DoStatement.LoopType;
import org.jetbrains.java.decompiler.modules.decompiler.stats.Statement.StatementType;

import java.util.*;
import java.util.Map.Entry;

/**
 * Flattens a hierarchical statement tree (rooted at a {@link RootStatement}) into a
 * {@link DirectGraph} of {@link DirectNode}s, threading control-flow edges — including
 * the synthetic paths required by finally blocks — between the generated nodes.
 *
 * NOTE(review): this chunk is truncated mid-{@code setEdges()}; the class continues
 * past the end of this view.
 */
public class FlattenStatementsHelper {
  // statement.id, node.id(direct), node.id(continue)
  // For a statement id, [0] is the node entered on a direct edge and [1] the node
  // targeted by a continue edge. Tail/condition helper nodes are keyed under -stat.id.
  private final Map<Integer, String[]> mapDestinationNodes = new HashMap<>();

  // node.id(source), statement.id(destination), edge type
  private final List<Edge> listEdges = new ArrayList<>();

  // node.id(exit), [node.id(source), statement.id(destination)]
  private final Map<String, List<String[]>> mapShortRangeFinallyPathIds = new HashMap<>();

  // node.id(exit), [node.id(source), statement.id(destination)]
  private final Map<String, List<String[]>> mapLongRangeFinallyPathIds = new HashMap<>();

  // positive if branches
  private final Map<String, Integer> mapPosIfBranch = new HashMap<>();

  private DirectGraph graph;
  private RootStatement root;

  /**
   * Builds and returns the direct graph for the given root: flattens all statements,
   * appends the dummy exit node, materializes the recorded edges, and sorts the
   * result in reverse post-order.
   */
  public DirectGraph buildDirectGraph(RootStatement root) {
    this.root = root;

    graph = new DirectGraph();

    flattenStatement();

    // dummy exit node
    Statement dummyexit = root.getDummyExit();
    DirectNode node = new DirectNode(DirectNodeType.DIRECT, dummyexit, Integer.toString(dummyexit.id));
    node.exprents = new ArrayList<>();
    graph.nodes.addWithKey(node, node.id);
    mapDestinationNodes.put(dummyexit.id, new String[]{node.id, null});

    setEdges();

    graph.first = graph.nodes.getWithKey(mapDestinationNodes.get(root.id)[0]);
    graph.sortReversePostOrder();

    return graph;
  }

  /**
   * Iterative (explicit-stack) traversal of the statement tree. Each
   * StatementStackEntry records how far a composite statement has been processed
   * (statementIndex) and how many successor edges have been handled (edgeIndex),
   * so processing can be suspended with 'continue mainloop' and resumed later —
   * this is what makes finally-handler re-entry possible without recursion.
   */
  private void flattenStatement() {
    // Continuation record for one statement being flattened.
    class StatementStackEntry {
      public final Statement statement;
      public final LinkedList<StackEntry> stackFinally;
      public final List<Exprent> tailExprents;

      public int statementIndex;   // next child to process when resumed
      public int edgeIndex;        // next successor edge to process when resumed
      public List<StatEdge> succEdges;

      StatementStackEntry(Statement statement, LinkedList<StackEntry> stackFinally, List<Exprent> tailExprents) {
        this.statement = statement;
        this.stackFinally = stackFinally;
        this.tailExprents = tailExprents;
      }
    }

    LinkedList<StatementStackEntry> lstStackStatements = new LinkedList<>();

    lstStackStatements.add(new StatementStackEntry(root, new LinkedList<>(), null));

    mainloop:
    while (!lstStackStatements.isEmpty()) {
      StatementStackEntry statEntry = lstStackStatements.removeFirst();

      Statement stat = statEntry.statement;
      LinkedList<StackEntry> stackFinally = statEntry.stackFinally;
      int statementBreakIndex = statEntry.statementIndex;

      DirectNode node, nd;

      List<StatEdge> lstSuccEdges = new ArrayList<>();
      DirectNode sourcenode = null;

      // succEdges != null means we are resuming a suspended entry: skip node creation
      // and go straight to edge processing below.
      if (statEntry.succEdges == null) {
        switch (stat.type) {
          case BASIC_BLOCK:
            node = new DirectNode(DirectNodeType.DIRECT, stat, (BasicBlockStatement)stat);
            if (stat.getExprents() != null) {
              node.exprents = stat.getExprents();
            }
            graph.nodes.putWithKey(node, node.id);
            mapDestinationNodes.put(stat.id, new String[]{node.id, null});

            lstSuccEdges.addAll(stat.getSuccessorEdges(EdgeType.DIRECT_ALL));
            sourcenode = node;

            // A pending tail expression list (e.g. a switch/if head) gets its own
            // TAIL node chained after the block, keyed under -stat.id.
            List<Exprent> tailExprentList = statEntry.tailExprents;

            if (tailExprentList != null) {
              DirectNode tail = new DirectNode(DirectNodeType.TAIL, stat, stat.id + "_tail");
              tail.exprents = tailExprentList;
              graph.nodes.putWithKey(tail, tail.id);

              mapDestinationNodes.put(-stat.id, new String[]{tail.id, null});
              listEdges.add(new Edge(node.id, -stat.id, EdgeType.REGULAR));

              sourcenode = tail;
            }

            // 'if' statement: record positive branch
            if (stat.getLastBasicType() == StatementType.IF) {
              mapPosIfBranch.put(sourcenode.id, lstSuccEdges.get(0).getDestination().id);
            }

            break;
          case CATCH_ALL:
          case TRY_CATCH:
            DirectNode firstnd = new DirectNode(DirectNodeType.TRY, stat, stat.id + "_try");

            mapDestinationNodes.put(stat.id, new String[]{firstnd.id, null});
            graph.nodes.putWithKey(firstnd, firstnd.id);

            LinkedList<StatementStackEntry> lst = new LinkedList<>();

            for (Statement st : stat.getStats()) {
              listEdges.add(new Edge(firstnd.id, st.id, EdgeType.REGULAR));

              LinkedList<StackEntry> stack = stackFinally;
              // A finally construct pushes a StackEntry so edges leaving its body are
              // later rerouted through the handler (see edge processing below).
              if (stat.type == StatementType.CATCH_ALL && ((CatchAllStatement)stat).isFinally()) {
                stack = new LinkedList<>(stackFinally);

                if (st == stat.getFirst()) { // catch head
                  stack.add(new StackEntry((CatchAllStatement)stat, Boolean.FALSE));
                }
                else { // handler
                  stack.add(new StackEntry((CatchAllStatement)stat, Boolean.TRUE, EdgeType.BREAK,
                                           root.getDummyExit(), st, st, firstnd, firstnd, true));
                }
              }
              lst.add(new StatementStackEntry(st, stack, null));
            }

            lstStackStatements.addAll(0, lst);
            break;
          case DO:
            // First pass: flatten the loop body, then resume here (statementIndex=1).
            if (statementBreakIndex == 0) {
              statEntry.statementIndex = 1;
              lstStackStatements.addFirst(statEntry);
              lstStackStatements.addFirst(new StatementStackEntry(stat.getFirst(), stackFinally, null));

              continue mainloop;
            }

            nd = graph.nodes.getWithKey(mapDestinationNodes.get(stat.getFirst().id)[0]);

            DoStatement dostat = (DoStatement)stat;
            LoopType loopType = dostat.getLoopType();

            if (loopType == LoopType.DO) {
              // Infinite loop: both direct and continue edges target the body head.
              mapDestinationNodes.put(stat.id, new String[]{nd.id, nd.id});
              break;
            }

            lstSuccEdges.add(stat.getSuccessorEdges(EdgeType.DIRECT_ALL).get(0)); // exactly one edge

            switch (loopType) {
              case WHILE:
              case DO_WHILE:
                node = new DirectNode(DirectNodeType.CONDITION, stat, stat.id + "_cond");
                node.exprents = dostat.getConditionExprentList();
                graph.nodes.putWithKey(node, node.id);

                listEdges.add(new Edge(node.id, stat.getFirst().id, EdgeType.REGULAR));

                if (loopType == LoopType.WHILE) {
                  mapDestinationNodes.put(stat.id, new String[]{node.id, node.id});
                }
                else {
                  mapDestinationNodes.put(stat.id, new String[]{nd.id, node.id});

                  // Avoid registering a duplicate continue edge for this loop.
                  boolean found = false;
                  for (Edge edge : listEdges) {
                    if (edge.statid.equals(stat.id) && edge.edgetype == EdgeType.CONTINUE) {
                      found = true;
                      break;
                    }
                  }
                  if (!found) {
                    listEdges.add(new Edge(nd.id, stat.id, EdgeType.CONTINUE));
                  }
                }
                sourcenode = node;
                break;
              case FOR:
                DirectNode nodeinit = new DirectNode(DirectNodeType.INIT, stat, stat.id + "_init");
                if (dostat.getInitExprent() != null) {
                  nodeinit.exprents = dostat.getInitExprentList();
                }
                graph.nodes.putWithKey(nodeinit, nodeinit.id);

                DirectNode nodecond = new DirectNode(DirectNodeType.CONDITION, stat, stat.id + "_cond");
                nodecond.exprents = dostat.getConditionExprentList();
                graph.nodes.putWithKey(nodecond, nodecond.id);

                DirectNode nodeinc = new DirectNode(DirectNodeType.INCREMENT, stat, stat.id + "_inc");
                nodeinc.exprents = dostat.getIncExprentList();
                graph.nodes.putWithKey(nodeinc, nodeinc.id);

                // Direct entry is init; continue targets the increment; the condition
                // node is keyed under -stat.id.
                mapDestinationNodes.put(stat.id, new String[]{nodeinit.id, nodeinc.id});
                mapDestinationNodes.put(-stat.id, new String[]{nodecond.id, null});

                listEdges.add(new Edge(nodecond.id, stat.getFirst().id, EdgeType.REGULAR));
                listEdges.add(new Edge(nodeinit.id, -stat.id, EdgeType.REGULAR));
                listEdges.add(new Edge(nodeinc.id, -stat.id, EdgeType.REGULAR));

                boolean found = false;
                for (Edge edge : listEdges) {
                  if (edge.statid.equals(stat.id) && edge.edgetype == EdgeType.CONTINUE) {
                    found = true;
                    break;
                  }
                }
                if (!found) {
                  listEdges.add(new Edge(nd.id, stat.id, EdgeType.CONTINUE));
                }

                sourcenode = nodecond;
            }
            break;
          case SYNCHRONIZED:
          case SWITCH:
          case IF:
          case SEQUENCE:
          case ROOT:
            int statsize = stat.getStats().size();
            if (stat.type == StatementType.SYNCHRONIZED) {
              statsize = 2; // exclude the handler if synchronized
            }

            if (statementBreakIndex <= statsize) {
              List<Exprent> tailexprlst = null;

              switch (stat.type) {
                case SYNCHRONIZED:
                  tailexprlst = ((SynchronizedStatement)stat).getHeadexprentList();
                  break;
                case SWITCH:
                  tailexprlst = ((SwitchStatement)stat).getHeadExprentList();
                  break;
                case IF:
                  tailexprlst = ((IfStatement)stat).getHeadexprentList();
              }

              // Process children one at a time, re-queueing ourselves in between so
              // the head expression list rides along with the first child only.
              for (int i = statementBreakIndex; i < statsize; i++) {
                statEntry.statementIndex = i + 1;

                lstStackStatements.addFirst(statEntry);
                lstStackStatements.addFirst(
                  new StatementStackEntry(stat.getStats().get(i), stackFinally,
                                          (i == 0 && tailexprlst != null && tailexprlst.get(0) != null) ? tailexprlst : null));

                continue mainloop;
              }

              node = graph.nodes.getWithKey(mapDestinationNodes.get(stat.getFirst().id)[0]);
              mapDestinationNodes.put(stat.id, new String[]{node.id, null});

              if (stat.type == StatementType.IF && ((IfStatement)stat).iftype == IfStatement.IFTYPE_IF) {
                lstSuccEdges.add(stat.getSuccessorEdges(EdgeType.DIRECT_ALL).get(0)); // exactly one edge
                sourcenode = tailexprlst.get(0) == null ? node : graph.nodes.getWithKey(node.id + "_tail");
              }
            }
        }
      }

      // no successor edges
      if (sourcenode != null) {

        if (statEntry.succEdges != null) {
          lstSuccEdges = statEntry.succEdges;
        }

        // Route each successor edge, unwinding/re-entering the finally stack as needed.
        for (int edgeindex = statEntry.edgeIndex; edgeindex < lstSuccEdges.size(); edgeindex++) {
          StatEdge edge = lstSuccEdges.get(edgeindex);

          LinkedList<StackEntry> stack = new LinkedList<>(stackFinally);

          EdgeType edgetype = edge.getType();
          Statement destination = edge.getDestination();

          DirectNode finallyShortRangeSource = sourcenode;
          DirectNode finallyLongRangeSource = sourcenode;
          Statement finallyShortRangeEntry = null;
          Statement finallyLongRangeEntry = null;

          boolean isFinallyMonitorExceptionPath = false;

          boolean isFinallyExit = false;

          while (true) {
            StackEntry entry = null;
            if (!stack.isEmpty()) {
              entry = stack.getLast();
            }

            boolean created = true;

            if (entry == null) {
              saveEdge(sourcenode, destination, edgetype,
                       isFinallyExit ? finallyShortRangeSource : null,
                       finallyLongRangeSource,
                       finallyShortRangeEntry, finallyLongRangeEntry, isFinallyMonitorExceptionPath);
            }
            else {
              CatchAllStatement catchall = entry.catchstatement;

              if (entry.state) { // finally handler statement
                if (edgetype == EdgeType.FINALLY_EXIT) {
                  // Leaving the handler: restore the suspended edge recorded on entry.
                  stack.removeLast();

                  destination = entry.destination;
                  edgetype = entry.edgetype;
                  finallyShortRangeSource = entry.finallyShortRangeSource;
                  finallyLongRangeSource = entry.finallyLongRangeSource;
                  finallyShortRangeEntry = entry.finallyShortRangeEntry;
                  finallyLongRangeEntry = entry.finallyLongRangeEntry;

                  isFinallyExit = true;
                  // NOTE(review): non-short-circuit '&' — both operands are plain
                  // booleans so behavior matches '&&' here, but confirm upstream
                  // whether this was intentional.
                  isFinallyMonitorExceptionPath = (catchall.getMonitor() != null) & entry.isFinallyExceptionPath;

                  created = false;
                }
                else {
                  if (!catchall.containsStatementStrict(destination)) {
                    stack.removeLast();
                    created = false;
                  }
                  else {
                    saveEdge(sourcenode, destination, edgetype,
                             isFinallyExit ? finallyShortRangeSource : null,
                             finallyLongRangeSource,
                             finallyShortRangeEntry, finallyLongRangeEntry, isFinallyMonitorExceptionPath);
                  }
                }
              }
              else { // finally protected try statement
                if (!catchall.containsStatementStrict(destination)) {
                  // Edge escapes the protected region: detour through the handler and
                  // suspend this statement entry so edge processing resumes afterwards.
                  saveEdge(sourcenode, catchall.getHandler(), EdgeType.REGULAR,
                           isFinallyExit ? finallyShortRangeSource : null,
                           finallyLongRangeSource,
                           finallyShortRangeEntry, finallyLongRangeEntry, isFinallyMonitorExceptionPath);

                  stack.removeLast();
                  stack.add(new StackEntry(catchall, Boolean.TRUE, edgetype, destination, catchall.getHandler(),
                                           finallyLongRangeEntry == null ? catchall.getHandler() : finallyLongRangeEntry,
                                           sourcenode, finallyLongRangeSource, false));

                  statEntry.edgeIndex = edgeindex + 1;
                  statEntry.succEdges = lstSuccEdges;
                  lstStackStatements.addFirst(statEntry);
                  lstStackStatements.addFirst(new StatementStackEntry(catchall.getHandler(), stack, null));

                  continue mainloop;
                }
                else {
                  saveEdge(sourcenode, destination, edgetype,
                           isFinallyExit ? finallyShortRangeSource : null,
                           finallyLongRangeSource,
                           finallyShortRangeEntry, finallyLongRangeEntry, isFinallyMonitorExceptionPath);
                }
              }
            }

            if (created) {
              break;
            }
          }
        }
      }
    }
  }

  /**
   * Records a pending edge from sourcenode to the node(s) of 'destination'.
   * FINALLY_EXIT edges are not added to listEdges; instead, when a finally source is
   * present, the short- and long-range finally path tables are updated (null path
   * components are encoded as literal nulls / "1" flags in the String[]).
   */
  private void saveEdge(DirectNode sourcenode,
                        Statement destination,
                        EdgeType edgetype,
                        DirectNode finallyShortRangeSource,
                        DirectNode finallyLongRangeSource,
                        Statement finallyShortRangeEntry,
                        Statement finallyLongRangeEntry,
                        boolean isFinallyMonitorExceptionPath) {
    if (edgetype != EdgeType.FINALLY_EXIT) {
      listEdges.add(new Edge(sourcenode.id, destination.id, edgetype));
    }

    if (finallyShortRangeSource != null) {
      boolean isContinueEdge = (edgetype == EdgeType.CONTINUE);

      mapShortRangeFinallyPathIds.computeIfAbsent(sourcenode.id, k -> new ArrayList<>()).add(new String[]{
        finallyShortRangeSource.id,
        Integer.toString(destination.id),
        Integer.toString(finallyShortRangeEntry.id),
        isFinallyMonitorExceptionPath ? "1" : null,
        isContinueEdge ? "1" : null});

      mapLongRangeFinallyPathIds.computeIfAbsent(sourcenode.id, k -> new ArrayList<>()).add(new String[]{
        finallyLongRangeSource.id,
        Integer.toString(destination.id),
        Integer.toString(finallyLongRangeEntry.id),
        isContinueEdge ? "1" : null});
    }
  }

  /**
   * Materializes the recorded edges into successor/predecessor links on the graph
   * nodes and derives the negative-if-branch map from mapPosIfBranch.
   * NOTE(review): truncated at the end of this view — the finally-path resolution
   * loop continues beyond this chunk.
   */
  private void setEdges() {
    for (Edge edge : listEdges) {
      String sourceid = edge.sourceid;
      Integer statid = edge.statid;

      DirectNode source = graph.nodes.getWithKey(sourceid);
      // CONTINUE edges resolve through slot [1] of the destination table, others [0].
      DirectNode dest = graph.nodes.getWithKey(mapDestinationNodes.get(statid)[edge.edgetype == EdgeType.CONTINUE ? 1 : 0]);

      if (!source.successors.contains(dest)) {
        source.successors.add(dest);
      }

      if (!dest.predecessors.contains(source)) {
        dest.predecessors.add(source);
      }

      if (mapPosIfBranch.containsKey(sourceid) && !statid.equals(mapPosIfBranch.get(sourceid))) {
        graph.mapNegIfBranch.put(sourceid, dest.id);
      }
    }

    for (int i = 0; i < 2; i++) {
      for (Entry<String, List<String[]>> ent : (i == 0 ?
mapShortRangeFinallyPathIds : mapLongRangeFinallyPathIds).entrySet()) { List<FinallyPathWrapper> newLst = new ArrayList<>(); List<String[]> lst = ent.getValue(); for (String[] arr : lst) { boolean isContinueEdge = arr[i == 0 ? 4 : 3] != null; DirectNode dest = graph.nodes.getWithKey(mapDestinationNodes.get(Integer.parseInt(arr[1]))[isContinueEdge ? 1 : 0]); DirectNode enter = graph.nodes.getWithKey(mapDestinationNodes.get(Integer.parseInt(arr[2]))[0]); newLst.add(new FinallyPathWrapper(arr[0], dest.id, enter.id)); if (i == 0 && arr[3] != null) { graph.mapFinallyMonitorExceptionPathExits.put(ent.getKey(), dest.id); } } if (!newLst.isEmpty()) { (i == 0 ? graph.mapShortRangeFinallyPaths : graph.mapLongRangeFinallyPaths).put(ent.getKey(), new ArrayList<>( new HashSet<>(newLst))); } } } } public Map<Integer, String[]> getMapDestinationNodes() { return mapDestinationNodes; } public static final class FinallyPathWrapper { public final String source; public final String destination; public final String entry; private FinallyPathWrapper(String source, String destination, String entry) { this.source = source; this.destination = destination; this.entry = entry; } @Override public boolean equals(Object o) { if (o == this) return true; if (!(o instanceof FinallyPathWrapper)) return false; FinallyPathWrapper fpw = (FinallyPathWrapper)o; return (source + ":" + destination + ":" + entry).equals(fpw.source + ":" + fpw.destination + ":" + fpw.entry); } @Override public int hashCode() { return (source + ":" + destination + ":" + entry).hashCode(); } @Override public String toString() { return source + "->(" + entry + ")->" + destination; } } private static class StackEntry { public final CatchAllStatement catchstatement; public final boolean state; public final EdgeType edgetype; public final boolean isFinallyExceptionPath; public final Statement destination; public final Statement finallyShortRangeEntry; public final Statement finallyLongRangeEntry; public final DirectNode 
finallyShortRangeSource; public final DirectNode finallyLongRangeSource; StackEntry(CatchAllStatement catchstatement, boolean state, EdgeType edgetype, Statement destination, Statement finallyShortRangeEntry, Statement finallyLongRangeEntry, DirectNode finallyShortRangeSource, DirectNode finallyLongRangeSource, boolean isFinallyExceptionPath) { this.catchstatement = catchstatement; this.state = state; this.edgetype = edgetype; this.isFinallyExceptionPath = isFinallyExceptionPath; this.destination = destination; this.finallyShortRangeEntry = finallyShortRangeEntry; this.finallyLongRangeEntry = finallyLongRangeEntry; this.finallyShortRangeSource = finallyShortRangeSource; this.finallyLongRangeSource = finallyLongRangeSource; } StackEntry(CatchAllStatement catchstatement, boolean state) { this(catchstatement, state, EdgeType.NULL, null, null, null, null, null, false); } } private static class Edge { public final String sourceid; public final Integer statid; public final EdgeType edgetype; Edge(String sourceid, Integer statid, EdgeType edgetype) { this.sourceid = sourceid; this.statid = statid; this.edgetype = edgetype; } } }
package org.motechproject.server.web.helper;

import org.codehaus.jackson.map.ObjectMapper;
import org.motechproject.commons.api.ClassUtils;
import org.motechproject.osgi.web.util.BundleHeaders;
import org.osgi.framework.Bundle;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;

import static org.apache.commons.lang.StringUtils.isNotBlank;

/**
 * Builds the HTML header section (CSS {@code <link>} tags followed by
 * {@code <script>} tags) for a bundle's {@code /webapp} resources. The order
 * of the resources can be customized through an optional
 * {@code /header-order.json} file on the classpath.
 */
public final class Header {

    private static final Logger LOGGER = LoggerFactory.getLogger(Header.class);

    private Header() {
    }

    /** Ordering configuration deserialized from {@code header-order.json}. */
    public static class HeaderOrder {
        private List<ElementOrder> lib;
        private List<ElementOrder> js;
        private List<ElementOrder> css;

        public List<ElementOrder> getLib() {
            return lib;
        }

        public void setLib(List<ElementOrder> lib) {
            this.lib = lib;
        }

        public List<ElementOrder> getJs() {
            return js;
        }

        public void setJs(List<ElementOrder> js) {
            this.js = js;
        }

        public List<ElementOrder> getCss() {
            return css;
        }

        public void setCss(List<ElementOrder> css) {
            this.css = css;
        }
    }

    /**
     * A single ordering rule for one resource: either an absolute position
     * ({@code order} set to {@code "first"}, {@code "last"} or a numeric
     * index) or a position relative to another resource
     * ({@code before}/{@code after}).
     */
    public static class ElementOrder {
        private String path;
        private String order;
        private String before;
        private String after;

        public String getPath() {
            return path;
        }

        public void setPath(String path) {
            this.path = path;
        }

        public String getOrder() {
            return order;
        }

        public void setOrder(String order) {
            this.order = order;
        }

        public String getBefore() {
            return before;
        }

        public void setBefore(String before) {
            this.before = before;
        }

        public String getAfter() {
            return after;
        }

        public void setAfter(String after) {
            this.after = after;
        }
    }

    /**
     * Generates the header markup for the given bundle: CSS first, then the
     * {@code lib} scripts, then the {@code js} scripts.
     *
     * @param bundle the bundle whose {@code /webapp} resources are listed
     * @return the concatenated {@code <link>} and {@code <script>} tags
     */
    public static String generateHeader(Bundle bundle) {
        InputStream stream = Header.class.getResourceAsStream("/header-order.json");
        HeaderOrder order;

        if (null != stream) {
            try {
                // readValue() also closes the stream (Jackson's
                // AUTO_CLOSE_SOURCE feature is enabled by default).
                order = new ObjectMapper().readValue(stream, HeaderOrder.class);
            } catch (IOException e) {
                LOGGER.error("There were problems with read header-order.json", e);
                order = new HeaderOrder();
            }
        } else {
            // no ordering file on the classpath -> keep discovery order
            order = new HeaderOrder();
        }

        String resourcePath = new BundleHeaders(bundle).getResourcePath();
        StringBuilder builder = new StringBuilder();

        addCSS(builder, bundle, resourcePath, order.getCss());
        addScripts(builder, bundle, resourcePath, "lib", order.getLib());
        addScripts(builder, bundle, resourcePath, "js", order.getJs());

        return builder.toString();
    }

    // Appends a <script> tag (one per line) for every *.js entry in the folder.
    private static void addScripts(StringBuilder builder, Bundle bundle, String resourcePath,
                                   String folderName, List<ElementOrder> order) {
        List<String> js = get(bundle, folderName, "*.js", order);

        for (String entryPath : js) {
            String path = "../" + resourcePath + entryPath;
            builder.append(createScript(path));
            builder.append("\n");
        }
    }

    // Appends a <link> tag (one per line) for every *.css entry, followed by a
    // separating blank line. Parameter order matches addScripts for consistency.
    private static void addCSS(StringBuilder builder, Bundle bundle, String resourcePath,
                               List<ElementOrder> order) {
        List<String> css = get(bundle, "css", "*.css", order);

        for (String entryPath : css) {
            String path = "../" + resourcePath + entryPath;
            builder.append(createCSS(path));
            builder.append("\n");
        }
        builder.append("\n");
    }

    // Lists the bundle entries under /webapp/<folderName>/ matching
    // filePattern (recursively) and applies the ordering rules when present.
    private static List<String> get(Bundle bundle, String folderName, String filePattern,
                                    List<ElementOrder> order) {
        String entriesPath = String.format("/webapp/%s/", folderName);
        Enumeration entries = bundle.findEntries(entriesPath, filePattern, true);
        List<URL> urls = ClassUtils.filterByClass(URL.class, entries);

        List<String> paths = new ArrayList<>(urls.size());
        for (URL url : urls) {
            paths.add(getPath(url));
        }

        if (!paths.isEmpty() && null != order) {
            changeOrder(paths, order, folderName);
        }

        return paths;
    }

    // Re-positions entries of 'paths' according to the ordering rules.
    private static void changeOrder(List<String> paths, List<ElementOrder> order, String folderName) {
        for (ElementOrder o : order) {
            String path = String.format("/%s/%s", folderName, o.getPath());
            // remove path from the list; it will be re-inserted at the
            // requested position below
            paths.remove(path);

            if (isNotBlank(o.getOrder())) {
                // the resource should be in a specific place
                switch (o.getOrder()) {
                    case "first":
                        paths.add(0, path);
                        break;
                    case "last":
                        paths.add(path);
                        break;
                    default:
                        int idx;
                        try {
                            idx = Integer.parseInt(o.getOrder());
                        } catch (NumberFormatException e) {
                            // tolerate a malformed index in the config file
                            LOGGER.error("Invalid order value for {}: {}", path, o.getOrder());
                            idx = paths.size();
                        }
                        // clamp to the valid insertion range [0, size]
                        idx = Math.max(0, Math.min(idx, paths.size()));
                        paths.add(idx, path);
                }
            } else if (isNotBlank(o.getAfter())) {
                // the resource should be after another resource;
                // append at the end when the anchor is absent
                String after = String.format("/%s/%s", folderName, o.getAfter());
                int idx = paths.indexOf(after);
                paths.add(idx >= 0 ? idx + 1 : paths.size(), path);
            } else if (isNotBlank(o.getBefore())) {
                // the resource should be before another resource.
                // BUGFIX: insert AT the anchor's index -- the previous
                // paths.add(idx - 1, path) placed the entry one slot too
                // early and threw IndexOutOfBoundsException when the anchor
                // was the first element or missing (idx <= 0).
                String before = String.format("/%s/%s", folderName, o.getBefore());
                int idx = paths.indexOf(before);
                paths.add(idx >= 0 ? idx : paths.size(), path);
            }
        }
    }

    // Renders a <script> tag for the given URL.
    private static String createScript(String url) {
        return String.format("<script type=\"text/javascript\" src=\"%s\"></script>", url);
    }

    // Renders a stylesheet <link> tag for the given URL.
    private static String createCSS(String url) {
        return String.format("<link rel=\"stylesheet\" type=\"text/css\" href=\"%s\">", url);
    }

    // Strips the /webapp/ prefix from a bundle entry URL and guarantees a
    // leading slash, e.g. "/js/app.js".
    private static String getPath(URL url) {
        String path = url.getPath();

        if (path.contains("/webapp/")) {
            path = path.replace("/webapp/", "");
        }

        if (!path.startsWith("/")) {
            path = "/" + path;
        }

        return path;
    }
}
package org.apache.cassandra.db.columniterator;
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */

import java.io.IOError;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

import com.google.common.collect.AbstractIterator;

import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.db.ColumnFamily;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.IColumn;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.io.sstable.IndexHelper;
import org.apache.cassandra.io.sstable.SSTableReader;
import org.apache.cassandra.io.util.FileDataInput;
import org.apache.cassandra.io.util.FileMark;

/**
 * This is a reader that finds the block for a starting column and returns
 * blocks before/after it for each next call. This function assumes that
 * the CF is sorted by name and exploits the name index.
 */
class IndexedSliceReader extends AbstractIterator<IColumn> implements IColumnIterator
{
    // CF shell holding only the deserialized metadata for this row (no columns).
    private final ColumnFamily emptyColumnFamily;
    // Column-name index entries for the row; null when the row has no index.
    private final List<IndexHelper.IndexInfo> indexes;
    // Positioned at the start of this row's serialized data.
    private final FileDataInput file;
    // Slice bounds; a zero-length array means "unbounded" on that side.
    private final byte[] startColumn;
    private final byte[] finishColumn;
    // When true, columns are returned in reverse comparator order.
    private final boolean reversed;

    // Strategy that loads the next block of columns into blockColumns.
    private BlockFetcher fetcher;
    // Columns already deserialized but not yet returned by computeNext().
    private Deque<IColumn> blockColumns = new ArrayDeque<IColumn>();
    // Column-name comparator taken from the CF metadata.
    private AbstractType comparator;

    /**
     * Prepares the reader: skips the row's bloom filter, reads the column
     * index (if any) and the CF metadata, then picks the fetcher --
     * index-driven when an index exists, simple sequential scan otherwise.
     * IOExceptions are rethrown as IOError, matching the IColumnIterator
     * convention of unchecked I/O failures in constructors.
     */
    public IndexedSliceReader(CFMetaData metadata, FileDataInput input, byte[] startColumn, byte[] finishColumn, boolean reversed)
    {
        this.file = input;
        this.startColumn = startColumn;
        this.finishColumn = finishColumn;
        this.reversed = reversed;
        comparator = metadata.comparator;
        try
        {
            IndexHelper.skipBloomFilter(file);
            indexes = IndexHelper.deserializeIndex(file);
            emptyColumnFamily = ColumnFamily.serializer().deserializeFromSSTableNoColumns(ColumnFamily.create(metadata), file);
            fetcher = indexes == null ? new SimpleBlockFetcher() : new IndexedBlockFetcher();
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
    }

    public ColumnFamily getColumnFamily()
    {
        return emptyColumnFamily;
    }

    // NOTE(review): key access is unsupported here -- callers appear to be
    // expected to already know the row key; confirm against call sites.
    public DecoratedKey getKey()
    {
        throw new UnsupportedOperationException();
    }

    /**
     * Returns true when the column's name falls inside the [start, finish]
     * slice bounds, taking iteration direction into account (in reversed
     * mode startColumn is the upper bound and finishColumn the lower one).
     * Empty bounds are treated as unbounded on that side.
     */
    private boolean isColumnNeeded(IColumn column)
    {
        if (startColumn.length == 0 && finishColumn.length == 0)
            return true;
        else if (startColumn.length == 0 && !reversed)
            return comparator.compare(column.name(), finishColumn) <= 0;
        else if (startColumn.length == 0 && reversed)
            return comparator.compare(column.name(), finishColumn) >= 0;
        else if (finishColumn.length == 0 && !reversed)
            return comparator.compare(column.name(), startColumn) >= 0;
        else if (finishColumn.length == 0 && reversed)
            return comparator.compare(column.name(), startColumn) <= 0;
        else if (!reversed)
            return comparator.compare(column.name(), startColumn) >= 0 && comparator.compare(column.name(), finishColumn) <= 0;
        else // if reversed
            return comparator.compare(column.name(), startColumn) <= 0 && comparator.compare(column.name(), finishColumn) >= 0;
    }

    /**
     * Drains buffered columns that fall inside the slice; when the buffer is
     * exhausted (poll() returned null), asks the fetcher for the next block
     * and signals end-of-data when none remains. Columns outside the bounds
     * are silently skipped.
     */
    protected IColumn computeNext()
    {
        while (true)
        {
            IColumn column = blockColumns.poll();
            if (column != null && isColumnNeeded(column))
                return column;
            try
            {
                if (column == null && !fetcher.getNextBlock())
                    return endOfData();
            }
            catch (IOException e)
            {
                throw new RuntimeException(e);
            }
        }
    }

    // No-op: this reader does not own the underlying FileDataInput.
    public void close()
    {
    }

    /** Loads the next block of columns into blockColumns; false when done. */
    interface BlockFetcher
    {
        public boolean getNextBlock() throws IOException;
    }

    /**
     * Fetcher for rows that have a column index: seeks directly to the index
     * block containing the slice start and walks blocks forward (or backward
     * when reversed), one block per getNextBlock() call.
     */
    private class IndexedBlockFetcher implements BlockFetcher
    {
        // Marks the start of the serialized column data, right after the
        // column count; block offsets in the index are relative to this.
        private final FileMark mark;
        // Index of the next block to read; moves toward 0 when reversed.
        private int curRangeIndex;

        IndexedBlockFetcher() throws IOException
        {
            file.readInt(); // column count
            this.mark = file.mark();
            curRangeIndex = IndexHelper.indexFor(startColumn, indexes, comparator, reversed);
            // indexFor can return indexes.size() when startColumn sorts past
            // the last block; clamp so reversed iteration starts on a real block.
            if (reversed && curRangeIndex == indexes.size())
                curRangeIndex--;
        }

        public boolean getNextBlock() throws IOException
        {
            if (curRangeIndex < 0 || curRangeIndex >= indexes.size())
                return false;

            /* seek to the correct offset to the data, and calculate the data size */
            IndexHelper.IndexInfo curColPosition = indexes.get(curRangeIndex);

            /* see if this read is really necessary. */
            if (reversed)
            {
                if ((finishColumn.length > 0 && comparator.compare(finishColumn, curColPosition.lastName) > 0) ||
                    (startColumn.length > 0 && comparator.compare(startColumn, curColPosition.firstName) < 0))
                    return false;
            }
            else
            {
                if ((startColumn.length > 0 && comparator.compare(startColumn, curColPosition.lastName) > 0) ||
                    (finishColumn.length > 0 && comparator.compare(finishColumn, curColPosition.firstName) < 0))
                    return false;
            }

            boolean outOfBounds = false;

            // rewind to the data start and skip to this block's offset
            file.reset(mark);
            long curOffset = file.skipBytes((int) curColPosition.offset);
            assert curOffset == curColPosition.offset;
            while (file.bytesPastMark(mark) < curColPosition.offset + curColPosition.width && !outOfBounds)
            {
                IColumn column = emptyColumnFamily.getColumnSerializer().deserialize(file);
                // addFirst in reversed mode so the deque ends up in return order
                if (reversed)
                    blockColumns.addFirst(column);
                else
                    blockColumns.addLast(column);

                /* see if we can stop seeking. */
                if (!reversed && finishColumn.length > 0)
                    outOfBounds = comparator.compare(column.name(), finishColumn) >= 0;
                else if (reversed && startColumn.length > 0)
                    outOfBounds = comparator.compare(column.name(), startColumn) >= 0;
            }

            if (reversed)
                curRangeIndex--;
            else
                curRangeIndex++;

            return true;
        }
    }

    /**
     * Fetcher for rows without a column index: reads every column up front
     * in the constructor (stopping early once past the slice end), so
     * getNextBlock() always reports that no further block exists.
     */
    private class SimpleBlockFetcher implements BlockFetcher
    {
        private SimpleBlockFetcher() throws IOException
        {
            int columns = file.readInt();
            for (int i = 0; i < columns; i++)
            {
                IColumn column = emptyColumnFamily.getColumnSerializer().deserialize(file);
                if (reversed)
                    blockColumns.addFirst(column);
                else
                    blockColumns.addLast(column);

                /* see if we can stop seeking. */
                boolean outOfBounds = false;
                if (!reversed && finishColumn.length > 0)
                    outOfBounds = comparator.compare(column.name(), finishColumn) >= 0;
                else if (reversed && startColumn.length > 0)
                    outOfBounds = comparator.compare(column.name(), startColumn) >= 0;
                if (outOfBounds)
                    break;
            }
        }

        public boolean getNextBlock() throws IOException
        {
            return false;
        }
    }
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.simpleworkflow.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * Provides details for the <code>ScheduleLambdaFunctionFailed</code> event.
 * <p>
 * All setters have fluent <code>withXxx</code> counterparts that return
 * <code>this</code> so calls can be chained.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ScheduleLambdaFunctionFailedEventAttributes implements Serializable, Cloneable, StructuredPojo {

    /** The unique Amazon SWF ID of the AWS Lambda task. */
    private String id;

    /** The name of the scheduled AWS Lambda function. */
    private String name;

    /**
     * The system-generated cause of the failure, useful for diagnostics. A
     * value of OPERATION_NOT_PERMITTED means the decision lacked sufficient
     * IAM permissions; see the Amazon SWF developer guide ("Using IAM to
     * Manage Access to Amazon SWF Workflows").
     */
    private String cause;

    /**
     * The ID of the <code>DecisionTaskCompleted</code> event corresponding to
     * the decision that scheduled this function; useful for tracing back the
     * chain of events leading up to this one.
     */
    private Long decisionTaskCompletedEventId;

    /** @param id the unique Amazon SWF ID of the AWS Lambda task */
    public void setId(String id) {
        this.id = id;
    }

    /** @return the unique Amazon SWF ID of the AWS Lambda task */
    public String getId() {
        return this.id;
    }

    /**
     * Fluent variant of {@link #setId(String)}.
     *
     * @param id the unique Amazon SWF ID of the AWS Lambda task
     * @return this object, for call chaining
     */
    public ScheduleLambdaFunctionFailedEventAttributes withId(String id) {
        setId(id);
        return this;
    }

    /** @param name the name of the scheduled AWS Lambda function */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the name of the scheduled AWS Lambda function */
    public String getName() {
        return this.name;
    }

    /**
     * Fluent variant of {@link #setName(String)}.
     *
     * @param name the name of the scheduled AWS Lambda function
     * @return this object, for call chaining
     */
    public ScheduleLambdaFunctionFailedEventAttributes withName(String name) {
        setName(name);
        return this;
    }

    /**
     * @param cause the cause of the failure; OPERATION_NOT_PERMITTED indicates
     *        insufficient IAM permissions
     * @see ScheduleLambdaFunctionFailedCause
     */
    public void setCause(String cause) {
        this.cause = cause;
    }

    /**
     * @return the cause of the failure; OPERATION_NOT_PERMITTED indicates
     *         insufficient IAM permissions
     * @see ScheduleLambdaFunctionFailedCause
     */
    public String getCause() {
        return this.cause;
    }

    /**
     * Fluent variant of {@link #setCause(String)}.
     *
     * @param cause the cause of the failure
     * @return this object, for call chaining
     * @see ScheduleLambdaFunctionFailedCause
     */
    public ScheduleLambdaFunctionFailedEventAttributes withCause(String cause) {
        setCause(cause);
        return this;
    }

    /**
     * Enum overload; stores {@code cause.toString()}.
     *
     * @param cause the cause of the failure
     * @see ScheduleLambdaFunctionFailedCause
     */
    public void setCause(ScheduleLambdaFunctionFailedCause cause) {
        this.cause = cause.toString();
    }

    /**
     * Fluent variant of {@link #setCause(ScheduleLambdaFunctionFailedCause)}.
     *
     * @param cause the cause of the failure
     * @return this object, for call chaining
     * @see ScheduleLambdaFunctionFailedCause
     */
    public ScheduleLambdaFunctionFailedEventAttributes withCause(ScheduleLambdaFunctionFailedCause cause) {
        setCause(cause);
        return this;
    }

    /**
     * @param decisionTaskCompletedEventId the ID of the
     *        <code>DecisionTaskCompleted</code> event for the decision that
     *        scheduled this function
     */
    public void setDecisionTaskCompletedEventId(Long decisionTaskCompletedEventId) {
        this.decisionTaskCompletedEventId = decisionTaskCompletedEventId;
    }

    /**
     * @return the ID of the <code>DecisionTaskCompleted</code> event for the
     *         decision that scheduled this function
     */
    public Long getDecisionTaskCompletedEventId() {
        return this.decisionTaskCompletedEventId;
    }

    /**
     * Fluent variant of {@link #setDecisionTaskCompletedEventId(Long)}.
     *
     * @param decisionTaskCompletedEventId the ID of the
     *        <code>DecisionTaskCompleted</code> event
     * @return this object, for call chaining
     */
    public ScheduleLambdaFunctionFailedEventAttributes withDecisionTaskCompletedEventId(Long decisionTaskCompletedEventId) {
        setDecisionTaskCompletedEventId(decisionTaskCompletedEventId);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null attributes are included.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder("{");
        if (getId() != null) {
            buf.append("Id: ").append(getId()).append(",");
        }
        if (getName() != null) {
            buf.append("Name: ").append(getName()).append(",");
        }
        if (getCause() != null) {
            buf.append("Cause: ").append(getCause()).append(",");
        }
        if (getDecisionTaskCompletedEventId() != null) {
            buf.append("DecisionTaskCompletedEventId: ").append(getDecisionTaskCompletedEventId());
        }
        return buf.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ScheduleLambdaFunctionFailedEventAttributes)) {
            return false;
        }
        ScheduleLambdaFunctionFailedEventAttributes that = (ScheduleLambdaFunctionFailedEventAttributes) obj;
        return fieldEquals(getId(), that.getId())
                && fieldEquals(getName(), that.getName())
                && fieldEquals(getCause(), that.getCause())
                && fieldEquals(getDecisionTaskCompletedEventId(), that.getDecisionTaskCompletedEventId());
    }

    // Null-safe equality: two nulls are equal, a null never equals a non-null.
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hash = 1;
        hash = prime * hash + fieldHash(getId());
        hash = prime * hash + fieldHash(getName());
        hash = prime * hash + fieldHash(getCause());
        hash = prime * hash + fieldHash(getDecisionTaskCompletedEventId());
        return hash;
    }

    // Null-safe hash: null contributes 0, matching the equals() contract above.
    private static int fieldHash(Object o) {
        return (o == null) ? 0 : o.hashCode();
    }

    @Override
    public ScheduleLambdaFunctionFailedEventAttributes clone() {
        try {
            return (ScheduleLambdaFunctionFailedEventAttributes) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.simpleworkflow.model.transform.ScheduleLambdaFunctionFailedEventAttributesMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/*- * See the file LICENSE for redistribution information. * * Copyright (c) 2002,2010 Oracle. All rights reserved. * * $Id: LocalCBVLSNUpdater.java,v 1.9 2010/01/04 15:50:47 cwl Exp $ */ package com.sleepycat.je.rep.impl.node; import static com.sleepycat.je.utilint.VLSN.NULL_VLSN; import java.util.logging.Logger; import com.sleepycat.je.DatabaseException; import com.sleepycat.je.EnvironmentFailureException; import com.sleepycat.je.rep.stream.Protocol; import com.sleepycat.je.utilint.LoggerUtils; import com.sleepycat.je.utilint.VLSN; /** * Supports updating the group database with each node's local CBVLSN when it * is in the Master state. There is one instance per feeder connection, plus * one for the Master. There is, logically, a LocalCBVLSNTracker instance * associated with each instance of the updater. The instance is local for an * update associated with a node in the Master state and is remote for each * Replica. * * The nodeCBVLSN can only increase during the lifetime of the * LocalCBVLSNUpdater instance. Note however that the value of the node's * CBVLSN as stored in the database, which represents the values from multiple * updaters associated, with a node over its lifetime may both decrease and * increase over its lifetime. The decreases are due primarily to rollbacks, * and should be relatively rare. * * The updaters used to maintain the Replica's local CBVLSNs are stored in the * Feeder.InputThread. The lifetime of such a LocalCBVLSNUpdater is therefore * determined by the lifetime of the connection between the Master and the * Replica. The node CBVLSN is updated each time a heart beat response is * processed by the FeederInput thread. It's also updated when the Master * detects that a Replica needs a network restore. In this case, it updates * cbvlsn to the value expected from the node after a network restore so that * the global CBVLSN can continue to make forward progress and not hold up the * cleaner. 
* * The Master maintains an updater for its own CBVLSN in the FeederManager. * This updater exists as long as the node retains its Master state. * * Local CBVLSNs are used only to contribute to the calculation of the global * CBVLSN. The global CBVLSN acts as the cleaner throttle. Any invariants, such * as the rule that the cleaner throttle cannot regress, are applied when doing * the global calculation. */ public class LocalCBVLSNUpdater { private static final String VLSN_SOURCE = "vlsn"; private static final String MASTER_SOURCE = "master"; private static final String HEARTBEAT_SOURCE = "heartbeat"; /* * The node id of the node whose CBLVLSN is being tracked. If this updater * is working on the behalf o a replica node, the nameIdPair is not the * name of this node. */ private final NameIdPair nameIdPair; /* This node; note that its node id may be different from nodeId above. */ private final RepNode repNode; /* * The node's local CBVLSN is cached here, for use without reading the * group db. */ private VLSN nodeCBVLSN; /* * True if this node's local CBVLSN has changed, but the new value * has not been stored into the group db yet. */ private boolean updatePending; /* Used to suppress database updates during unit testing. */ private static boolean suppressGroupDBUpdates = false; private final Logger logger; LocalCBVLSNUpdater(NameIdPair nameIdPair, RepNode repNode) { this.nameIdPair = nameIdPair; this.repNode = repNode; nodeCBVLSN = NULL_VLSN; updatePending = false; logger = LoggerUtils.getLogger(getClass()); } /** * Sets the current CBVLSN for this node, and trips the updatePending * flag so that we know there is something to store to the RepGroupDB. 
* * @param syncableVLSN the new local CBVLSN * @throws InterruptedException */ private void set(VLSN syncableVLSN, String source) { assert repNode.isMaster() : "LocalCBVLSNUpdater.set() can only be called by the master"; if (!nodeCBVLSN.equals(syncableVLSN)) { LoggerUtils.fine(logger, repNode.getRepImpl(), "update local CBVLSN for " + nameIdPair + " from nodeCBVLSN " + nodeCBVLSN + " to " + syncableVLSN + " from " + source); if (nodeCBVLSN.compareTo(syncableVLSN) >= 0) { /* * LCBVLSN must not decrease, since it can result in a GCBVLSN * value that's outside a truncated VLSNIndex range. See SR * [#17343] */ throw EnvironmentFailureException.unexpectedState (repNode.getRepImpl(), "nodeCBVLSN" + nodeCBVLSN + " >= " + syncableVLSN + " attempted update local CBVLSN for " + nameIdPair + " from " + source); } nodeCBVLSN = syncableVLSN; updatePending = true; } } /** * Exercise caution when using this method. The normal mode of updating the * CBVLSN is via the heartbeat. So, if the CBVLSN is updated through the * method, ensure that it supplies an increasing CBCLSN and that it's * CBVLSN is coordinated with the one supplied by * {@link #updateForReplica(com.sleepycat.je.rep.stream.Protocol.HeartbeatResponse)} * . The two methods together, must maintain the invariant that the local * CBVLSN value must always be ascending. * * @param syncableVLSN the new local CBVLSN */ public void updateForReplica(VLSN syncableVLSN) { set(syncableVLSN, VLSN_SOURCE); update(); } /** * Sets the current CBVLSN for this node. Can only be used by the * master. The new cbvlsn value comes from an incoming heartbeat response * message. * @param heartbeat The incoming heartbeat response message from the * replica containing its newest local cbvlsn. */ public void updateForReplica(Protocol.HeartbeatResponse heartbeat) { set(heartbeat.getSyncupVLSN(), HEARTBEAT_SOURCE); update(); } /** * As a master, update the database with the local CBVLSN for this node. 
* This call is needed because the master's local CBVLSN will not be * broadcast via a heartbeat, so it needs to get to the updater another * way. * @throws InterruptedException */ void updateForMaster(LocalCBVLSNTracker tracker) throws InterruptedException { set(tracker.getBroadcastCBVLSN(), MASTER_SOURCE); update(); } /** * Update the database, with the local CBVLSN associated with the node Id * if required. Note that updates can only be invoked on the master * @throws InterruptedException */ private void update() { if (!updatePending) { return; } if (suppressGroupDBUpdates) { /* Short circuit the database update. For testing only. */ updatePending = false; return; } if (repNode.isShutdown()) { /* * Don't advance VLSNs after a shutdown request, to minimize the * need for a hard recovery. */ return; } try { VLSN candidate = nodeCBVLSN; if (candidate.isNull()) { return; } if (candidate.compareTo(repNode.getGroupCBVLSN()) < 0) { /* Don't let the group CBVLSN regress.*/ return; } boolean updated = repNode.repGroupDB.updateLocalCBVLSN(nameIdPair, candidate); /* If not updated, we'll try again later. */ if (updated) { updatePending = false; } } catch (EnvironmentFailureException e) { /* * Propagate environment failure exception so that the master * can shut down. */ throw e; } catch (DatabaseException e) { e.printStackTrace(); LoggerUtils.warning(repNode.logger, repNode.getRepImpl(), "local cbvlsn update failed for node: " + nameIdPair + " Error: " + e.getMessage()); } } /** * Used during testing to suppress CBVLSN updates at this node. Note that * the cleaner must also typically be turned off (first) in conjunction * with the suppression. If multiple nodes are running in the VM all nodes * will have the CBVLSN updates turned off. * @param suppressGroupDBUpdates If true, the group DB and the group CBVLSN * won't be updated at the master. 
*/ static public void setSuppressGroupDBUpdates(boolean suppressGroupDBUpdates) { LocalCBVLSNUpdater.suppressGroupDBUpdates = suppressGroupDBUpdates; } /* For unit testing */ static boolean getSuppressGroupDBUpdates() { return suppressGroupDBUpdates; } }
package org.knowm.xchange.bittrex;

import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.knowm.xchange.bittrex.dto.account.BittrexBalance;
import org.knowm.xchange.bittrex.dto.account.BittrexDepositHistory;
import org.knowm.xchange.bittrex.dto.account.BittrexWithdrawalHistory;
import org.knowm.xchange.bittrex.dto.marketdata.BittrexLevel;
import org.knowm.xchange.bittrex.dto.marketdata.BittrexMarketSummary;
import org.knowm.xchange.bittrex.dto.marketdata.BittrexSymbol;
import org.knowm.xchange.bittrex.dto.marketdata.BittrexTrade;
import org.knowm.xchange.bittrex.dto.trade.BittrexOpenOrder;
import org.knowm.xchange.bittrex.dto.trade.BittrexOrder;
import org.knowm.xchange.bittrex.dto.trade.BittrexOrderBase;
import org.knowm.xchange.bittrex.dto.trade.BittrexUserTrade;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order.OrderStatus;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.FundingRecord;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.marketdata.Trades.TradeSortType;
import org.knowm.xchange.dto.meta.CurrencyMetaData;
import org.knowm.xchange.dto.meta.CurrencyPairMetaData;
import org.knowm.xchange.dto.meta.ExchangeMetaData;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.UserTrade;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Static adapters converting Bittrex REST DTOs into XChange domain objects.
 *
 * <p>Bittrex market symbols are "counter-base" (e.g. "BTC-LTC" is the LTC/BTC pair), which is why
 * the split components are swapped when building a {@link CurrencyPair}.
 */
public final class BittrexAdapters {

  public static final Logger log = LoggerFactory.getLogger(BittrexAdapters.class);

  private BittrexAdapters() {}

  /** Converts each Bittrex market symbol into a {@link CurrencyPair}. */
  public static List<CurrencyPair> adaptCurrencyPairs(Collection<BittrexSymbol> bittrexSymbol) {

    List<CurrencyPair> currencyPairs = new ArrayList<>();
    for (BittrexSymbol symbol : bittrexSymbol) {
      currencyPairs.add(adaptCurrencyPair(symbol));
    }
    return currencyPairs;
  }

  /** Converts a Bittrex symbol (marketCurrency = base, baseCurrency = counter) to a pair. */
  public static CurrencyPair adaptCurrencyPair(BittrexSymbol bittrexSymbol) {

    String baseSymbol = bittrexSymbol.getMarketCurrency();
    String counterSymbol = bittrexSymbol.getBaseCurrency();
    return new CurrencyPair(baseSymbol, counterSymbol);
  }

  /** Converts a list of open Bittrex orders into XChange limit orders. */
  public static List<LimitOrder> adaptOpenOrders(List<BittrexOpenOrder> bittrexOpenOrders) {

    List<LimitOrder> openOrders = new ArrayList<>();

    for (BittrexOpenOrder order : bittrexOpenOrders) {
      openOrders.add(adaptOrder(order));
    }

    return openOrders;
  }

  /**
   * Converts a Bittrex order DTO plus a pre-computed status into a {@link LimitOrder}.
   *
   * @param order the Bittrex order (exchange symbol is "COUNTER-BASE", e.g. "BTC-LTC")
   * @param status status to attach to the resulting order
   */
  public static LimitOrder adaptOrder(BittrexOrderBase order, OrderStatus status) {

    OrderType type =
        order.getOrderType().equalsIgnoreCase("LIMIT_SELL") ? OrderType.ASK : OrderType.BID;
    String[] currencies = order.getExchange().split("-");
    CurrencyPair pair = new CurrencyPair(currencies[1], currencies[0]);

    return new LimitOrder.Builder(type, pair)
        .originalAmount(order.getQuantity())
        .id(order.getOrderUuid())
        .timestamp(order.getOpened())
        .limitPrice(order.getLimit())
        .averagePrice(order.getPricePerUnit())
        .cumulativeAmount(
            order.getQuantityRemaining() == null
                ? null
                : order.getQuantity().subtract(order.getQuantityRemaining()))
        .fee(order.getCommissionPaid())
        .orderStatus(status)
        .build();
  }

  /**
   * Converts up to {@code depth} order book levels into limit orders.
   *
   * @param orders raw levels; a null array yields an empty list
   * @param orderType "bid" or "ask" (case-insensitive)
   */
  public static List<LimitOrder> adaptOrders(
      BittrexLevel[] orders, CurrencyPair currencyPair, String orderType, String id, int depth) {

    if (orders == null) {
      return new ArrayList<>();
    }

    List<LimitOrder> limitOrders = new ArrayList<>(orders.length);

    for (int i = 0; i < Math.min(orders.length, depth); i++) {
      BittrexLevel order = orders[i];
      limitOrders.add(adaptOrder(order.getAmount(), order.getPrice(), currencyPair, orderType, id));
    }

    return limitOrders;
  }

  /** Builds a single limit order from raw price/amount values. */
  public static LimitOrder adaptOrder(
      BigDecimal amount,
      BigDecimal price,
      CurrencyPair currencyPair,
      String orderTypeString,
      String id) {

    OrderType orderType = orderTypeString.equalsIgnoreCase("bid") ? OrderType.BID : OrderType.ASK;

    return new LimitOrder(orderType, amount, currencyPair, id, null, price);
  }

  public static LimitOrder adaptOrder(BittrexOrder order) {
    return adaptOrder(order, adaptOrderStatus(order));
  }

  public static LimitOrder adaptOrder(BittrexOpenOrder order) {
    return adaptOrder(order, adaptOrderStatus(order));
  }

  /**
   * Derives an {@link OrderStatus} from a closed/open order record.
   *
   * <p>Fix: the open/cancel flags are nullable {@link Boolean}s in the REST payload; unboxing them
   * directly previously threw NPE. A missing flag is now treated as {@code false}.
   */
  private static OrderStatus adaptOrderStatus(BittrexOrder order) {

    OrderStatus status = OrderStatus.NEW;

    BigDecimal qty = order.getQuantity();
    // A missing remaining quantity means nothing has been filled yet.
    BigDecimal qtyRem =
        order.getQuantityRemaining() != null ? order.getQuantityRemaining() : order.getQuantity();
    boolean isOpen = Boolean.TRUE.equals(order.getOpen());
    boolean isCancelling = Boolean.TRUE.equals(order.getCancelInitiated());
    int qtyRemainingToQty = qtyRem.compareTo(qty);
    int qtyRemainingIsZero = qtyRem.compareTo(BigDecimal.ZERO);

    if (isOpen && !isCancelling && qtyRemainingToQty < 0) {
      /* The order is open and remaining quantity less than order quantity */
      status = OrderStatus.PARTIALLY_FILLED;
    } else if (!isOpen && !isCancelling && qtyRemainingIsZero <= 0) {
      /* The order is closed and remaining quantity is zero */
      status = OrderStatus.FILLED;
    } else if (isOpen && isCancelling) {
      /* The order is open and the isCancelling flag has been set */
      status = OrderStatus.PENDING_CANCEL;
    } else if (!isOpen && isCancelling) {
      /* The order is closed and the isCancelling flag has been set */
      status = OrderStatus.CANCELED;
    }

    return status;
  }

  /**
   * Derives an {@link OrderStatus} from an open-order record (no open flag available).
   *
   * <p>Fix: null-safe unboxing of the cancel flag (see {@link #adaptOrderStatus(BittrexOrder)}).
   */
  private static OrderStatus adaptOrderStatus(BittrexOpenOrder order) {

    OrderStatus status = OrderStatus.NEW;

    BigDecimal qty = order.getQuantity();
    BigDecimal qtyRem =
        order.getQuantityRemaining() != null ? order.getQuantityRemaining() : order.getQuantity();
    boolean isCancelling = Boolean.TRUE.equals(order.getCancelInitiated());
    int qtyRemainingToQty = qtyRem.compareTo(qty);

    if (!isCancelling && qtyRemainingToQty < 0) {
      /* The order is open and remaining quantity less than order quantity */
      status = OrderStatus.PARTIALLY_FILLED;
    } else if (isCancelling) {
      /* The order is open and the isCancelling flag has been set */
      status = OrderStatus.PENDING_CANCEL;
    }

    return status;
  }

  /** Converts a public market trade into an XChange {@link Trade}. */
  public static Trade adaptTrade(BittrexTrade trade, CurrencyPair currencyPair) {

    OrderType orderType = trade.getOrderType().equalsIgnoreCase("BUY") ? OrderType.BID : OrderType.ASK;
    BigDecimal amount = trade.getQuantity();
    BigDecimal price = trade.getPrice();
    Date date = BittrexUtils.toDate(trade.getTimeStamp());
    final String tradeId = String.valueOf(trade.getId());
    return new Trade.Builder()
        .type(orderType)
        .originalAmount(amount)
        .currencyPair(currencyPair)
        .price(price)
        .timestamp(date)
        .id(tradeId)
        .build();
  }

  /** Converts public trades, tracking the highest trade id as the "last" marker. */
  public static Trades adaptTrades(List<BittrexTrade> trades, CurrencyPair currencyPair) {

    List<Trade> tradesList = new ArrayList<>(trades.size());
    long lastTradeId = 0;
    for (BittrexTrade trade : trades) {
      // parseLong avoids the needless Long boxing of Long.valueOf.
      long tradeId = Long.parseLong(trade.getId());
      if (tradeId > lastTradeId) {
        lastTradeId = tradeId;
      }
      tradesList.add(adaptTrade(trade, currencyPair));
    }
    return new Trades(tradesList, lastTradeId, TradeSortType.SortByID);
  }

  /** Converts a 24h market summary into a {@link Ticker}. */
  public static Ticker adaptTicker(BittrexMarketSummary marketSummary, CurrencyPair currencyPair) {

    BigDecimal last = marketSummary.getLast();
    BigDecimal bid = marketSummary.getBid();
    BigDecimal ask = marketSummary.getAsk();
    BigDecimal high = marketSummary.getHigh();
    BigDecimal low = marketSummary.getLow();
    BigDecimal volume = marketSummary.getVolume();

    Date timestamp = BittrexUtils.toDate(marketSummary.getTimeStamp());

    return new Ticker.Builder()
        .currencyPair(currencyPair)
        .last(last)
        .bid(bid)
        .ask(ask)
        .high(high)
        .low(low)
        .volume(volume)
        .timestamp(timestamp)
        .build();
  }

  /**
   * Frozen = balance - available - pending, treating missing components as zero.
   * Returns zero when the total balance itself is missing.
   */
  protected static BigDecimal calculateFrozenBalance(BittrexBalance balance) {
    if (balance.getBalance() == null) {
      return BigDecimal.ZERO;
    }
    final BigDecimal[] frozenBalance = {balance.getBalance()};
    Optional.ofNullable(balance.getAvailable())
        .ifPresent(available -> frozenBalance[0] = frozenBalance[0].subtract(available));
    Optional.ofNullable(balance.getPending())
        .ifPresent(pending -> frozenBalance[0] = frozenBalance[0].subtract(pending));
    return frozenBalance[0];
  }

  /** Converts all account balances into a {@link Wallet}. */
  public static Wallet adaptWallet(List<BittrexBalance> balances) {

    List<Balance> wallets = new ArrayList<>(balances.size());

    for (BittrexBalance balance : balances) {
      wallets.add(
          new Balance(
              Currency.getInstance(balance.getCurrency().toUpperCase()),
              Optional.ofNullable(balance.getBalance()).orElse(BigDecimal.ZERO),
              Optional.ofNullable(balance.getAvailable()).orElse(BigDecimal.ZERO),
              calculateFrozenBalance(balance),
              BigDecimal.ZERO,
              BigDecimal.ZERO,
              BigDecimal.ZERO,
              Optional.ofNullable(balance.getPending()).orElse(BigDecimal.ZERO)));
    }

    return Wallet.Builder.from(wallets).build();
  }

  /** Converts a single account balance, defaulting missing amounts to zero. */
  public static Balance adaptBalance(BittrexBalance balance) {
    return new Balance(
        Currency.getInstance(balance.getCurrency().toUpperCase()),
        Optional.ofNullable(balance.getBalance()).orElse(BigDecimal.ZERO),
        Optional.ofNullable(balance.getAvailable()).orElse(BigDecimal.ZERO),
        calculateFrozenBalance(balance),
        BigDecimal.ZERO,
        BigDecimal.ZERO,
        BigDecimal.ZERO,
        Optional.ofNullable(balance.getPending()).orElse(BigDecimal.ZERO));
  }

  /** Converts order-history entries, skipping orders that never traded. */
  public static List<UserTrade> adaptUserTrades(List<BittrexUserTrade> bittrexUserTrades) {

    List<UserTrade> trades = new ArrayList<>();

    for (BittrexUserTrade bittrexTrade : bittrexUserTrades) {
      if (!isOrderWithoutTrade(bittrexTrade)) {
        trades.add(adaptUserTrade(bittrexTrade));
      }
    }
    return trades;
  }

  /** Converts one order-history entry to a {@link UserTrade} (filled amount, fee in counter). */
  public static UserTrade adaptUserTrade(BittrexUserTrade trade) {

    String[] currencies = trade.getExchange().split("-");
    CurrencyPair currencyPair = new CurrencyPair(currencies[1], currencies[0]);

    OrderType orderType =
        trade.getOrderType().equalsIgnoreCase("LIMIT_BUY") ? OrderType.BID : OrderType.ASK;
    BigDecimal amount = trade.getQuantity().subtract(trade.getQuantityRemaining());
    Date date = BittrexUtils.toDate(trade.getClosed());
    String orderId = String.valueOf(trade.getOrderUuid());

    // Fall back to the limit price when no average execution price was reported.
    BigDecimal price = trade.getPricePerUnit();
    if (price == null) {
      price = trade.getLimit();
    }

    return new UserTrade.Builder()
        .type(orderType)
        .originalAmount(amount)
        .currencyPair(currencyPair)
        .price(price)
        .timestamp(date)
        .id(orderId)
        .orderId(orderId)
        .feeAmount(trade.getCommission())
        .feeCurrency(currencyPair.counter)
        .build();
  }

  /** Registers any newly-seen pairs/currencies in the exchange metadata (values left null). */
  public static ExchangeMetaData adaptMetaData(
      List<BittrexSymbol> rawSymbols, ExchangeMetaData metaData) {

    List<CurrencyPair> currencyPairs = BittrexAdapters.adaptCurrencyPairs(rawSymbols);

    Map<CurrencyPair, CurrencyPairMetaData> pairsMap = metaData.getCurrencyPairs();
    Map<Currency, CurrencyMetaData> currenciesMap = metaData.getCurrencies();
    for (CurrencyPair c : currencyPairs) {
      if (!pairsMap.containsKey(c)) {
        pairsMap.put(c, null);
      }
      if (!currenciesMap.containsKey(c.base)) {
        currenciesMap.put(c.base, null);
      }
      if (!currenciesMap.containsKey(c.counter)) {
        currenciesMap.put(c.counter, null);
      }
    }

    return metaData;
  }

  /** Converts deposit history entries; deposits are always reported COMPLETE. */
  public static List<FundingRecord> adaptDepositRecords(
      List<BittrexDepositHistory> bittrexFundingHistories) {
    final ArrayList<FundingRecord> fundingRecords = new ArrayList<>();
    for (BittrexDepositHistory f : bittrexFundingHistories) {
      if (f != null) {
        fundingRecords.add(
            new FundingRecord(
                f.getCryptoAddress(),
                f.getLastUpdated(),
                Currency.getInstance(f.getCurrency()),
                f.getAmount(),
                String.valueOf(f.getId()),
                f.getTxId(),
                FundingRecord.Type.DEPOSIT,
                FundingRecord.Status.COMPLETE,
                null,
                null,
                null));
      }
    }
    return fundingRecords;
  }

  /**
   * Maps withdrawal flags to a status. Null-safe: a missing flag counts as false,
   * so an entry with no flags set falls through to FAILED (as before).
   */
  private static FundingRecord.Status fromWithdrawalRecord(
      BittrexWithdrawalHistory bittrexWithdrawal) {
    if (Boolean.TRUE.equals(bittrexWithdrawal.getCanceled())) return FundingRecord.Status.CANCELLED;
    if (Boolean.TRUE.equals(bittrexWithdrawal.getInvalidAddress()))
      return FundingRecord.Status.FAILED;
    if (Boolean.TRUE.equals(bittrexWithdrawal.getPendingPayment()))
      return FundingRecord.Status.PROCESSING;
    if (Boolean.TRUE.equals(bittrexWithdrawal.getAuthorized()))
      return FundingRecord.Status.COMPLETE;
    return FundingRecord.Status.FAILED;
  }

  /** Converts withdrawal history entries into funding records. */
  public static List<FundingRecord> adaptWithdrawalRecords(
      List<BittrexWithdrawalHistory> bittrexFundingHistories) {
    final ArrayList<FundingRecord> fundingRecords = new ArrayList<>();
    for (BittrexWithdrawalHistory f : bittrexFundingHistories) {
      if (f != null) {
        final FundingRecord.Status status = fromWithdrawalRecord(f);
        fundingRecords.add(
            new FundingRecord(
                f.getAddress(),
                f.getOpened(),
                Currency.getInstance(f.getCurrency()),
                f.getAmount(),
                f.getPaymentUuid(),
                f.getTxId(),
                FundingRecord.Type.WITHDRAWAL,
                status,
                null,
                f.getTxCost(),
                null));
      }
    }
    return fundingRecords;
  }

  /** True when nothing of the order was ever filled (remaining == original quantity). */
  private static boolean isOrderWithoutTrade(BittrexUserTrade bittrexTrade) {
    return bittrexTrade.getQuantity().compareTo(bittrexTrade.getQuantityRemaining()) == 0;
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.ingest; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.TemplateScript; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.Date; import java.util.EnumMap; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; /** * Represents a single document being captured before indexing and holds the source and metadata (like id, type and index). 
 */
public final class IngestDocument {

    public static final String INGEST_KEY = "_ingest";
    private static final String INGEST_KEY_PREFIX = INGEST_KEY + ".";
    private static final String SOURCE_PREFIX = SourceFieldMapper.NAME + ".";

    static final String TIMESTAMP = "timestamp";

    // The document source merged with the es metadata fields (_index, _type, _id, and
    // optionally _routing/_parent). Field paths in the public API resolve against this map.
    private final Map<String, Object> sourceAndMetadata;
    // Ingest-pipeline metadata, e.g. the "timestamp" recorded when the document entered the pipeline.
    private final Map<String, Object> ingestMetadata;

    /**
     * Creates a new ingest document from the provided source and es metadata values.
     * routing and parent are optional and are only stored when non-null.
     */
    public IngestDocument(String index, String type, String id, String routing, String parent, Map<String, Object> source) {
        this.sourceAndMetadata = new HashMap<>();
        this.sourceAndMetadata.putAll(source);
        this.sourceAndMetadata.put(MetaData.INDEX.getFieldName(), index);
        this.sourceAndMetadata.put(MetaData.TYPE.getFieldName(), type);
        this.sourceAndMetadata.put(MetaData.ID.getFieldName(), id);
        if (routing != null) {
            this.sourceAndMetadata.put(MetaData.ROUTING.getFieldName(), routing);
        }
        if (parent != null) {
            this.sourceAndMetadata.put(MetaData.PARENT.getFieldName(), parent);
        }
        this.ingestMetadata = new HashMap<>();
        // Capture the moment the document entered the pipeline, in UTC.
        this.ingestMetadata.put(TIMESTAMP, ZonedDateTime.now(ZoneOffset.UTC));
    }

    /**
     * Copy constructor that creates a new {@link IngestDocument} which has exactly the same
     * properties as the one provided as argument. Both maps are deep-copied so the copies are
     * independent of the original.
     */
    public IngestDocument(IngestDocument other) {
        this(deepCopyMap(other.sourceAndMetadata), deepCopyMap(other.ingestMetadata));
    }

    /**
     * Constructor needed for testing that allows to create a new {@link IngestDocument} given the
     * provided elasticsearch metadata, source and ingest metadata. This is needed because the
     * ingest metadata will be initialized with the current timestamp at init time, which makes
     * equality comparisons impossible in tests.
     */
    public IngestDocument(Map<String, Object> sourceAndMetadata, Map<String, Object> ingestMetadata) {
        this.sourceAndMetadata = sourceAndMetadata;
        this.ingestMetadata = ingestMetadata;
    }

    /**
     * Returns the value contained in the document for the provided path
     * @param path The path within the document in dot-notation
     * @param clazz The expected class of the field value
     * @return the value for the provided path if existing, null otherwise
     * @throws IllegalArgumentException if the path is null, empty, invalid, if the field doesn't exist
     * or if the field that is found at the provided path is not of the expected type.
     */
    public <T> T getFieldValue(String path, Class<T> clazz) {
        FieldPath fieldPath = new FieldPath(path);
        Object context = fieldPath.initialContext;
        // Walk the dot-separated path one element at a time; resolve() throws on any missing step.
        for (String pathElement : fieldPath.pathElements) {
            context = resolve(pathElement, path, context);
        }
        return cast(path, context, clazz);
    }

    /**
     * Returns the value contained in the document for the provided path
     *
     * @param path The path within the document in dot-notation
     * @param clazz The expected class of the field value
     * @param ignoreMissing The flag to determine whether to throw an exception when `path` is not found in the document.
     * @return the value for the provided path if existing, null otherwise.
     * @throws IllegalArgumentException only if ignoreMissing is false and the path is null, empty, invalid, if the field doesn't exist
     * or if the field that is found at the provided path is not of the expected type.
*/ public <T> T getFieldValue(String path, Class<T> clazz, boolean ignoreMissing) { try { return getFieldValue(path, clazz); } catch (IllegalArgumentException e) { if (ignoreMissing && hasField(path) != true) { return null; } else { throw e; } } } /** * Returns the value contained in the document with the provided templated path * @param pathTemplate The path within the document in dot-notation * @param clazz The expected class fo the field value * @return the value fro the provided path if existing, null otherwise * @throws IllegalArgumentException if the pathTemplate is null, empty, invalid, if the field doesn't exist, * or if the field that is found at the provided path is not of the expected type. */ public <T> T getFieldValue(TemplateScript.Factory pathTemplate, Class<T> clazz) { return getFieldValue(renderTemplate(pathTemplate), clazz); } /** * Returns the value contained in the document for the provided path as a byte array. * If the path value is a string, a base64 decode operation will happen. * If the path value is a byte array, it is just returned * @param path The path within the document in dot-notation * @return the byte array for the provided path if existing * @throws IllegalArgumentException if the path is null, empty, invalid, if the field doesn't exist * or if the field that is found at the provided path is not of the expected type. */ public byte[] getFieldValueAsBytes(String path) { return getFieldValueAsBytes(path, false); } /** * Returns the value contained in the document for the provided path as a byte array. * If the path value is a string, a base64 decode operation will happen. * If the path value is a byte array, it is just returned * @param path The path within the document in dot-notation * @param ignoreMissing The flag to determine whether to throw an exception when `path` is not found in the document. 
* @return the byte array for the provided path if existing * @throws IllegalArgumentException if the path is null, empty, invalid, if the field doesn't exist * or if the field that is found at the provided path is not of the expected type. */ public byte[] getFieldValueAsBytes(String path, boolean ignoreMissing) { Object object = getFieldValue(path, Object.class, ignoreMissing); if (object == null) { return null; } else if (object instanceof byte[]) { return (byte[]) object; } else if (object instanceof String) { return Base64.getDecoder().decode(object.toString()); } else { throw new IllegalArgumentException("Content field [" + path + "] of unknown type [" + object.getClass().getName() + "], must be string or byte array"); } } /** * Checks whether the document contains a value for the provided templated path * @param fieldPathTemplate the template for the path within the document in dot-notation * @return true if the document contains a value for the field, false otherwise * @throws IllegalArgumentException if the path is null, empty or invalid */ public boolean hasField(TemplateScript.Factory fieldPathTemplate) { return hasField(renderTemplate(fieldPathTemplate)); } /** * Checks whether the document contains a value for the provided path * @param path The path within the document in dot-notation * @return true if the document contains a value for the field, false otherwise * @throws IllegalArgumentException if the path is null, empty or invalid. */ public boolean hasField(String path) { return hasField(path, false); } /** * Checks whether the document contains a value for the provided path * @param path The path within the document in dot-notation * @param failOutOfRange Whether to throw an IllegalArgumentException if array is accessed outside of its range * @return true if the document contains a value for the field, false otherwise * @throws IllegalArgumentException if the path is null, empty or invalid. 
     */
    public boolean hasField(String path, boolean failOutOfRange) {
        FieldPath fieldPath = new FieldPath(path);
        Object context = fieldPath.initialContext;
        // Traverse every path element except the last; the leaf is checked separately below
        // because a missing leaf is an ordinary "false", not a traversal failure.
        for (int i = 0; i < fieldPath.pathElements.length - 1; i++) {
            String pathElement = fieldPath.pathElements[i];
            if (context == null) {
                return false;
            }
            if (context instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> map = (Map<String, Object>) context;
                context = map.get(pathElement);
            } else if (context instanceof List) {
                @SuppressWarnings("unchecked")
                List<Object> list = (List<Object>) context;
                try {
                    // Inside a list the path element must be a numeric index.
                    int index = Integer.parseInt(pathElement);
                    if (index < 0 || index >= list.size()) {
                        // Out-of-range index: either an error or simply "not present",
                        // depending on the caller's failOutOfRange choice.
                        if (failOutOfRange) {
                            throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" +
                                list.size() + "] as part of path [" + path +"]");
                        } else {
                            return false;
                        }
                    }
                    context = list.get(index);
                } catch (NumberFormatException e) {
                    // Non-numeric element against a list cannot match anything.
                    return false;
                }
            } else {
                // Scalar encountered before the path was exhausted: path cannot exist.
                return false;
            }
        }

        // Leaf check: membership for maps, index validity for lists.
        String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1];
        if (context instanceof Map) {
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) context;
            return map.containsKey(leafKey);
        }
        if (context instanceof List) {
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) context;
            try {
                int index = Integer.parseInt(leafKey);
                if (index >= 0 && index < list.size()) {
                    return true;
                } else {
                    if (failOutOfRange) {
                        throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" +
                            list.size() + "] as part of path [" + path +"]");
                    } else {
                        return false;
                    }
                }
            } catch (NumberFormatException e) {
                return false;
            }
        }
        return false;
    }

    /**
     * Removes the field identified by the provided path.
     * @param fieldPathTemplate Resolves to the path with dot-notation within the document
     * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist.
     */
    public void removeField(TemplateScript.Factory fieldPathTemplate) {
        removeField(renderTemplate(fieldPathTemplate));
    }

    /**
     * Removes the field identified by the provided path.
     * @param path the path of the field to be removed
     * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist.
     */
    public void removeField(String path) {
        FieldPath fieldPath = new FieldPath(path);
        Object context = fieldPath.initialContext;
        // Resolve down to the parent of the leaf; resolve() throws if any intermediate is missing.
        for (int i = 0; i < fieldPath.pathElements.length - 1; i++) {
            context = resolve(fieldPath.pathElements[i], path, context);
        }

        String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1];
        if (context instanceof Map) {
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) context;
            if (map.containsKey(leafKey)) {
                map.remove(leafKey);
                return;
            }
            throw new IllegalArgumentException("field [" + leafKey + "] not present as part of path [" + path + "]");
        }
        if (context instanceof List) {
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) context;
            // For a list parent, the leaf must be a valid in-range numeric index.
            int index;
            try {
                index = Integer.parseInt(leafKey);
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("[" + leafKey + "] is not an integer, cannot be used as an index as part of path [" + path + "]", e);
            }
            if (index < 0 || index >= list.size()) {
                throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" +
                    list.size() + "] as part of path [" + path + "]");
            }
            list.remove(index);
            return;
        }

        if (context == null) {
            throw new IllegalArgumentException("cannot remove [" + leafKey + "] from null as part of path [" + path + "]");
        }
        throw new IllegalArgumentException("cannot remove [" + leafKey + "] from object of type [" +
            context.getClass().getName() + "] as part of path [" + path + "]");
    }

    /**
     * Resolves a single path element against the given context (a map or a list),
     * throwing a descriptive IllegalArgumentException for every failure mode.
     */
    private static Object resolve(String pathElement, String fullPath, Object context) {
        if (context == null) {
            throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from null as part of path [" + fullPath + "]");
        }
        if (context instanceof Map) {
            @SuppressWarnings("unchecked")
            Map<String, Object> map = (Map<String, Object>) context;
            if (map.containsKey(pathElement)) {
                return map.get(pathElement);
            }
            throw new IllegalArgumentException("field [" + pathElement + "] not present as part of path [" + fullPath + "]");
        }
        if (context instanceof List) {
            @SuppressWarnings("unchecked")
            List<Object> list = (List<Object>) context;
            // A list can only be traversed with a valid in-range numeric index.
            int index;
            try {
                index = Integer.parseInt(pathElement);
            } catch (NumberFormatException e) {
                throw new IllegalArgumentException("[" + pathElement + "] is not an integer, cannot be used as an index as part of path [" + fullPath + "]", e);
            }
            if (index < 0 || index >= list.size()) {
                throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" +
                    list.size() + "] as part of path [" + fullPath + "]");
            }
            return list.get(index);
        }
        throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from object of type [" +
            context.getClass().getName() + "] as part of path [" + fullPath + "]");
    }

    /**
     * Appends the provided value to the provided path in the document.
     * Any non existing path element will be created.
     * If the path identifies a list, the value will be appended to the existing list.
     * If the path identifies a scalar, the scalar will be converted to a list and
     * the provided value will be added to the newly created list.
     * Supports multiple values too provided in forms of list, in that case all the values will be appended to the
     * existing (or newly created) list.
     * @param path The path within the document in dot-notation
     * @param value The value or values to append to the existing ones
     * @throws IllegalArgumentException if the path is null, empty or invalid.
     */
    public void appendFieldValue(String path, Object value) {
        setFieldValue(path, value, true);
    }

    /**
     * Appends the provided value to the provided path in the document.
     * Any non existing path element will be created.
* If the path identifies a list, the value will be appended to the existing list. * If the path identifies a scalar, the scalar will be converted to a list and * the provided value will be added to the newly created list. * Supports multiple values too provided in forms of list, in that case all the values will be appended to the * existing (or newly created) list. * @param fieldPathTemplate Resolves to the path with dot-notation within the document * @param valueSource The value source that will produce the value or values to append to the existing ones * @throws IllegalArgumentException if the path is null, empty or invalid. */ public void appendFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource valueSource) { Map<String, Object> model = createTemplateModel(); appendFieldValue(fieldPathTemplate.newInstance(model).execute(), valueSource.copyAndResolve(model)); } /** * Sets the provided value to the provided path in the document. * Any non existing path element will be created. * If the last item in the path is a list, the value will replace the existing list as a whole. * Use {@link #appendFieldValue(String, Object)} to append values to lists instead. * @param path The path within the document in dot-notation * @param value The value to put in for the path key * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the * item identified by the provided path. */ public void setFieldValue(String path, Object value) { setFieldValue(path, value, false); } /** * Sets the provided value to the provided path in the document. * Any non existing path element will be created. If the last element is a list, * the value will replace the existing list. 
* @param fieldPathTemplate Resolves to the path with dot-notation within the document * @param valueSource The value source that will produce the value to put in for the path key * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the * item identified by the provided path. */ public void setFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource valueSource) { Map<String, Object> model = createTemplateModel(); setFieldValue(fieldPathTemplate.newInstance(model).execute(), valueSource.copyAndResolve(model), false); } private void setFieldValue(String path, Object value, boolean append) { FieldPath fieldPath = new FieldPath(path); Object context = fieldPath.initialContext; for (int i = 0; i < fieldPath.pathElements.length - 1; i++) { String pathElement = fieldPath.pathElements[i]; if (context == null) { throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from null as part of path [" + path + "]"); } if (context instanceof Map) { @SuppressWarnings("unchecked") Map<String, Object> map = (Map<String, Object>) context; if (map.containsKey(pathElement)) { context = map.get(pathElement); } else { HashMap<Object, Object> newMap = new HashMap<>(); map.put(pathElement, newMap); context = newMap; } } else if (context instanceof List) { @SuppressWarnings("unchecked") List<Object> list = (List<Object>) context; int index; try { index = Integer.parseInt(pathElement); } catch (NumberFormatException e) { throw new IllegalArgumentException("[" + pathElement + "] is not an integer, cannot be used as an index as part of path [" + path + "]", e); } if (index < 0 || index >= list.size()) { throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]"); } context = list.get(index); } else { throw new IllegalArgumentException("cannot resolve [" + pathElement + "] from object of type [" + context.getClass().getName() + "] as 
part of path [" + path + "]"); } } String leafKey = fieldPath.pathElements[fieldPath.pathElements.length - 1]; if (context == null) { throw new IllegalArgumentException("cannot set [" + leafKey + "] with null parent as part of path [" + path + "]"); } if (context instanceof Map) { @SuppressWarnings("unchecked") Map<String, Object> map = (Map<String, Object>) context; if (append) { if (map.containsKey(leafKey)) { Object object = map.get(leafKey); List<Object> list = appendValues(object, value); if (list != object) { map.put(leafKey, list); } } else { List<Object> list = new ArrayList<>(); appendValues(list, value); map.put(leafKey, list); } return; } map.put(leafKey, value); } else if (context instanceof List) { @SuppressWarnings("unchecked") List<Object> list = (List<Object>) context; int index; try { index = Integer.parseInt(leafKey); } catch (NumberFormatException e) { throw new IllegalArgumentException("[" + leafKey + "] is not an integer, cannot be used as an index as part of path [" + path + "]", e); } if (index < 0 || index >= list.size()) { throw new IllegalArgumentException("[" + index + "] is out of bounds for array with length [" + list.size() + "] as part of path [" + path + "]"); } if (append) { Object object = list.get(index); List<Object> newList = appendValues(object, value); if (newList != object) { list.set(index, newList); } return; } list.set(index, value); } else { throw new IllegalArgumentException("cannot set [" + leafKey + "] with parent object of type [" + context.getClass().getName() + "] as part of path [" + path + "]"); } } @SuppressWarnings("unchecked") private static List<Object> appendValues(Object maybeList, Object value) { List<Object> list; if (maybeList instanceof List) { //maybeList is already a list, we append the provided values to it list = (List<Object>) maybeList; } else { //maybeList is a scalar, we convert it to a list and append the provided values to it list = new ArrayList<>(); list.add(maybeList); } appendValues(list, 
value); return list; } private static void appendValues(List<Object> list, Object value) { if (value instanceof List) { List<?> valueList = (List<?>) value; valueList.stream().forEach(list::add); } else { list.add(value); } } private static <T> T cast(String path, Object object, Class<T> clazz) { if (object == null) { return null; } if (clazz.isInstance(object)) { return clazz.cast(object); } throw new IllegalArgumentException("field [" + path + "] of type [" + object.getClass().getName() + "] cannot be cast to [" + clazz.getName() + "]"); } public String renderTemplate(TemplateScript.Factory template) { return template.newInstance(createTemplateModel()).execute(); } private Map<String, Object> createTemplateModel() { Map<String, Object> model = new HashMap<>(sourceAndMetadata); model.put(SourceFieldMapper.NAME, sourceAndMetadata); // If there is a field in the source with the name '_ingest' it gets overwritten here, // if access to that field is required then it get accessed via '_source._ingest' model.put(INGEST_KEY, ingestMetadata); return model; } /** * one time operation that extracts the metadata fields from the ingest document and returns them. * Metadata fields that used to be accessible as ordinary top level fields will be removed as part of this call. */ public Map<MetaData, String> extractMetadata() { Map<MetaData, String> metadataMap = new EnumMap<>(MetaData.class); for (MetaData metaData : MetaData.values()) { metadataMap.put(metaData, cast(metaData.getFieldName(), sourceAndMetadata.remove(metaData.getFieldName()), String.class)); } return metadataMap; } /** * Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones. 
* Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} */ public Map<String, Object> getIngestMetadata() { return this.ingestMetadata; } /** * Returns the document including its metadata fields, unless {@link #extractMetadata()} has been called, in which case the * metadata fields will not be present anymore. * Modify the document instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} */ public Map<String, Object> getSourceAndMetadata() { return this.sourceAndMetadata; } @SuppressWarnings("unchecked") private static <K, V> Map<K, V> deepCopyMap(Map<K, V> source) { return (Map<K, V>) deepCopy(source); } private static Object deepCopy(Object value) { if (value instanceof Map) { Map<?, ?> mapValue = (Map<?, ?>) value; Map<Object, Object> copy = new HashMap<>(mapValue.size()); for (Map.Entry<?, ?> entry : mapValue.entrySet()) { copy.put(entry.getKey(), deepCopy(entry.getValue())); } return copy; } else if (value instanceof List) { List<?> listValue = (List<?>) value; List<Object> copy = new ArrayList<>(listValue.size()); for (Object itemValue : listValue) { copy.add(deepCopy(itemValue)); } return copy; } else if (value instanceof byte[]) { byte[] bytes = (byte[]) value; return Arrays.copyOf(bytes, bytes.length); } else if (value == null || value instanceof String || value instanceof Integer || value instanceof Long || value instanceof Float || value instanceof Double || value instanceof Boolean || value instanceof ZonedDateTime) { return value; } else if (value instanceof Date) { return ((Date) value).clone(); } else { throw new IllegalArgumentException("unexpected value type [" + value.getClass() + "]"); } } @Override public boolean equals(Object obj) { if (obj == this) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } IngestDocument other = (IngestDocument) obj; return Objects.equals(sourceAndMetadata, other.sourceAndMetadata) && 
Objects.equals(ingestMetadata, other.ingestMetadata); } @Override public int hashCode() { return Objects.hash(sourceAndMetadata, ingestMetadata); } @Override public String toString() { return "IngestDocument{" + " sourceAndMetadata=" + sourceAndMetadata + ", ingestMetadata=" + ingestMetadata + '}'; } public enum MetaData { INDEX(IndexFieldMapper.NAME), TYPE(TypeFieldMapper.NAME), ID(IdFieldMapper.NAME), ROUTING(RoutingFieldMapper.NAME), PARENT(ParentFieldMapper.NAME); private final String fieldName; MetaData(String fieldName) { this.fieldName = fieldName; } public String getFieldName() { return fieldName; } } private class FieldPath { private final String[] pathElements; private final Object initialContext; private FieldPath(String path) { if (Strings.isEmpty(path)) { throw new IllegalArgumentException("path cannot be null nor empty"); } String newPath; if (path.startsWith(INGEST_KEY_PREFIX)) { initialContext = ingestMetadata; newPath = path.substring(INGEST_KEY_PREFIX.length(), path.length()); } else { initialContext = sourceAndMetadata; if (path.startsWith(SOURCE_PREFIX)) { newPath = path.substring(SOURCE_PREFIX.length(), path.length()); } else { newPath = path; } } this.pathElements = newPath.split("\\."); if (pathElements.length == 1 && pathElements[0].isEmpty()) { throw new IllegalArgumentException("path [" + path + "] is not valid"); } } } }
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.wso2.andes.framing;

import org.wso2.andes.AMQException;

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

/**
 * The AMQP protocol-initiation frame: the 4-octet "AMQP" header followed by
 * protocol class, instance, major and minor version octets (8 bytes total).
 */
public class ProtocolInitiation extends AMQDataBlock implements EncodableAMQDataBlock
{
    // TODO: generate these constants automatically from the xml protocol spec file
    private static final byte[] AMQP_HEADER = new byte[]{(byte) 'A', (byte) 'M', (byte) 'Q', (byte) 'P'};

    private static final byte CURRENT_PROTOCOL_CLASS = 1;
    private static final byte TCP_PROTOCOL_INSTANCE = 1;

    public final byte[] _protocolHeader;
    public final byte _protocolClass;
    public final byte _protocolInstance;
    public final byte _protocolMajor;
    public final byte _protocolMinor;

    public ProtocolInitiation(byte[] protocolHeader, byte protocolClass, byte protocolInstance, byte protocolMajor, byte protocolMinor)
    {
        _protocolHeader = protocolHeader;
        _protocolClass = protocolClass;
        _protocolInstance = protocolInstance;
        _protocolMajor = protocolMajor;
        _protocolMinor = protocolMinor;
    }

    public ProtocolInitiation(ProtocolVersion pv)
    {
        // 0-9-1 redefined the header layout: class/instance become 0/0 and the
        // version octets are fixed at 9/1.
        this(AMQP_HEADER,
             pv.equals(ProtocolVersion.v0_91) ? 0 : CURRENT_PROTOCOL_CLASS,
             pv.equals(ProtocolVersion.v0_91) ? 0 : TCP_PROTOCOL_INSTANCE,
             pv.equals(ProtocolVersion.v0_91) ? 9 : pv.getMajorVersion(),
             pv.equals(ProtocolVersion.v0_91) ? 1 : pv.getMinorVersion());
    }

    /** Reads the 8-byte protocol-initiation frame from the buffer's current position. */
    public ProtocolInitiation(ByteBuffer in)
    {
        _protocolHeader = new byte[4];
        in.get(_protocolHeader);

        _protocolClass = in.get();
        _protocolInstance = in.get();
        _protocolMajor = in.get();
        _protocolMinor = in.get();
    }

    public void writePayload(org.apache.mina.common.ByteBuffer buffer)
    {
        writePayload(buffer.buf());
    }

    /** @return the frame size in bytes: 4 header octets + 4 single-octet fields. */
    public long getSize()
    {
        return 4 + 1 + 1 + 1 + 1;
    }

    public void writePayload(ByteBuffer buffer)
    {
        buffer.put(_protocolHeader);
        buffer.put(_protocolClass);
        buffer.put(_protocolInstance);
        buffer.put(_protocolMajor);
        buffer.put(_protocolMinor);
    }

    @Override
    public boolean equals(Object o)
    {
        if (!(o instanceof ProtocolInitiation))
        {
            return false;
        }

        ProtocolInitiation pi = (ProtocolInitiation) o;
        if (pi._protocolHeader == null)
        {
            return false;
        }

        return Arrays.equals(_protocolHeader, pi._protocolHeader)
               && _protocolClass == pi._protocolClass
               && _protocolInstance == pi._protocolInstance
               && _protocolMajor == pi._protocolMajor
               && _protocolMinor == pi._protocolMinor;
    }

    @Override
    public int hashCode()
    {
        int result = _protocolHeader != null ? Arrays.hashCode(_protocolHeader) : 0;
        result = 31 * result + (int) _protocolClass;
        result = 31 * result + (int) _protocolInstance;
        result = 31 * result + (int) _protocolMajor;
        result = 31 * result + (int) _protocolMinor;
        return result;
    }

    public static class Decoder //implements MessageDecoder
    {
        /**
         *
         * @param in input buffer
         * @return true if we have enough data to decode the PI frame fully, false if more
         * data is required
         */
        public boolean decodable(ByteBuffer in)
        {
            return (in.remaining() >= 8);
        }
    }

    /**
     * Validates the received header and version octets against the known protocol versions.
     *
     * @return the negotiated protocol version
     * @throws AMQException if the header, class, instance or version is not acceptable
     */
    public ProtocolVersion checkVersion() throws AMQException
    {
        if (_protocolHeader.length != 4)
        {
            throw new AMQProtocolHeaderException("Protocol header should have exactly four octets", null);
        }
        for (int i = 0; i < 4; i++)
        {
            if (_protocolHeader[i] != AMQP_HEADER[i])
            {
                // ISO-8859-1 is guaranteed by the JLS, so decoding can never fail here;
                // the previous code wrapped this in a dead try/catch for UnsupportedEncodingException.
                throw new AMQProtocolHeaderException("Protocol header is not correct: Got "
                                                     + new String(_protocolHeader, StandardCharsets.ISO_8859_1)
                                                     + " should be: "
                                                     + new String(AMQP_HEADER, StandardCharsets.ISO_8859_1), null);
            }
        }

        ProtocolVersion pv;

        // Hack for 0-9-1 which changed how the header was defined
        if (_protocolInstance == 0 && _protocolMajor == 9 && _protocolMinor == 1)
        {
            pv = ProtocolVersion.v0_91;
            if (_protocolClass != 0)
            {
                throw new AMQProtocolClassException("Protocol class " + 0 + " was expected; received "
                                                    + _protocolClass, null);
            }
        }
        else if (_protocolClass != CURRENT_PROTOCOL_CLASS)
        {
            throw new AMQProtocolClassException("Protocol class " + CURRENT_PROTOCOL_CLASS + " was expected; received "
                                                + _protocolClass, null);
        }
        else if (_protocolInstance != TCP_PROTOCOL_INSTANCE)
        {
            throw new AMQProtocolInstanceException("Protocol instance " + TCP_PROTOCOL_INSTANCE + " was expected; received "
                                                   + _protocolInstance, null);
        }
        else
        {
            pv = new ProtocolVersion(_protocolMajor, _protocolMinor);
        }

        if (!pv.isSupported())
        {
            // TODO: add list of available versions in list to msg...
            throw new AMQProtocolVersionException("Protocol version " + _protocolMajor + "." + _protocolMinor
                                                  + " not supported by this version of the Qpid broker.", null);
        }
        return pv;
    }

    /** @return the header text followed by the four field octets in hex, e.g. "AMQP1101". */
    public String toString()
    {
        StringBuilder buffer = new StringBuilder(new String(_protocolHeader, StandardCharsets.ISO_8859_1));
        buffer.append(Integer.toHexString(_protocolClass));
        buffer.append(Integer.toHexString(_protocolInstance));
        buffer.append(Integer.toHexString(_protocolMajor));
        buffer.append(Integer.toHexString(_protocolMinor));
        return buffer.toString();
    }
}
/*- * #%L * utils-commons * %% * Copyright (C) 2016 - 2018 Gilles Landel * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package fr.landel.utils.commons.function; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.Test; import fr.landel.utils.commons.exception.FunctionException; /** * Check {@link HexaPredicateThrowable} * * @since Nov 18, 2017 * @author Gilles * */ public class HexaPredicateThrowableTest { private static final String ERROR1 = "The first argument is null"; private static final String ERROR2 = "The second argument is null"; private static final String ERROR3 = "Both arguments are null"; private static final String ERROR4 = "First string is not in upper case"; private static final String ERROR5 = "Second string is not in upper case"; private static final String ERROR6 = "Both strings are not in upper case"; private static final HexaPredicateThrowable<String, String, Integer, String, String, String, IllegalArgumentException> P1 = (s1, s2, i, s3, s4, s5) -> { if (s1 != null && s2 != null && i > 0) { return s1.length() > s2.length(); } else if (s1 != null) { throw new IllegalArgumentException(ERROR2); } else if (s2 != null) { throw new IllegalArgumentException(ERROR1); } throw new IllegalArgumentException(ERROR3); }; private static final HexaPredicateThrowable<String, String, 
Integer, String, String, String, IllegalArgumentException> P2 = (s1, s2, i, s3, s4, s5) -> { String s1u = s1.toUpperCase(); String s2u = s2.toUpperCase(); if (s1u.equals(s1) && s2u.equals(s2)) { return s1u.contains(s2u) || s2u.contains(s1u); } else if (!s1u.equals(s1)) { throw new IllegalArgumentException(ERROR4); } else if (!s2u.equals(s2)) { throw new IllegalArgumentException(ERROR5); } throw new IllegalArgumentException(ERROR6); }; /** * Test method for * {@link HexaPredicateThrowable#test(java.lang.Object, java.lang.Object)}. */ @Test public void testTest() { try { assertTrue(P1.test("v12", "v8", 1, "turbo", "t", "u")); } catch (FunctionException e) { fail("Predicate failed"); } try { P1.test(null, "v2", 1, "turbo", "t", "u"); fail("Predicate has to fail"); } catch (IllegalArgumentException e) { assertNotNull(e); assertEquals(ERROR1, e.getMessage()); } } /** * Test method for * {@link HexaPredicateThrowable#testThrows(java.lang.Object, java.lang.Object)}. */ @Test public void testTestThrows() { try { assertTrue(P1.testThrows("v12", "v8", 1, "turbo", "t", "u")); } catch (IllegalArgumentException e) { fail("Predicate failed"); } try { P1.testThrows(null, "v2", 1, "turbo", "t", "u"); fail("Predicate has to fail"); } catch (IllegalArgumentException e) { assertNotNull(e); assertEquals(ERROR1, e.getMessage()); } } /** * Test method for * {@link HexaPredicateThrowable#and(HexaPredicateThrowable)}. 
*/ @Test public void testAnd() { final HexaPredicateThrowable<String, String, Integer, String, String, String, IllegalArgumentException> pp = P1.and(P2); try { assertTrue(pp.testThrows("V12", "V1", 1, "turbo", "t", "u")); assertFalse(pp.testThrows("V12", "V8", 1, "turbo", "t", "u")); assertFalse(pp.testThrows("V6", "V12", 1, "turbo", "t", "u")); assertFalse(pp.testThrows("V6", "V6", 1, "turbo", "t", "u")); } catch (IllegalArgumentException e) { fail("Predicate failed"); } try { pp.testThrows(null, "V8", 1, "turbo", "t", "u"); fail("Predicate has to fail"); } catch (IllegalArgumentException e) { assertNotNull(e); assertEquals(ERROR1, e.getMessage()); } try { pp.testThrows("V12", "v8", 1, "turbo", "t", "u"); fail("Predicate has to fail"); } catch (IllegalArgumentException e) { assertNotNull(e); assertEquals(ERROR5, e.getMessage()); } } /** * Test method for {@link HexaPredicateThrowable#negateThrows()}. */ @Test public void testNegateThrows() { final HexaPredicateThrowable<String, String, Integer, String, String, String, IllegalArgumentException> pp = P1.negateThrows(); try { assertFalse(pp.testThrows("V12", "V8", 1, "turbo", "t", "u")); assertTrue(pp.testThrows("v6", "V8", 2, "turbo", "t", "u")); } catch (IllegalArgumentException e) { fail("Predicate failed"); } try { pp.testThrows("V6", null, 1, "turbo", "t", "u"); fail("Predicate has to fail"); } catch (IllegalArgumentException e) { assertNotNull(e); assertEquals(ERROR2, e.getMessage()); } } /** * Test method for * {@link HexaPredicateThrowable#or(HexaPredicateThrowable)}. 
*/ @Test public void testOr() { final HexaPredicateThrowable<String, String, Integer, String, String, String, IllegalArgumentException> pp = P1.or(P2); try { assertTrue(pp.testThrows("V12", "V1", 1, "turbo", "t", "u")); assertTrue(pp.testThrows("V", "V1", 1, "turbo", "t", "u")); assertTrue(pp.testThrows("V12", "V12", 1, "turbo", "t", "u")); assertFalse(pp.testThrows("V6", "V12", 1, "turbo", "t", "u")); } catch (IllegalArgumentException e) { fail("Predicate failed"); } try { pp.testThrows(null, "V8", 1, "turbo", "t", "u"); fail("Predicate has to fail"); } catch (IllegalArgumentException e) { assertNotNull(e); assertEquals(ERROR1, e.getMessage()); } try { // first test pass and return true, so the second one is not // executed assertTrue(pp.testThrows("V12", "v8", 1, "turbo", "t", "u")); } catch (IllegalArgumentException e) { fail("Predicate failed"); } try { // first test pass and return false, so the next is executed pp.testThrows("v6", "V8", 1, "turbo", "t", "u"); fail("Predicate has to fail"); } catch (IllegalArgumentException e) { assertNotNull(e); assertEquals(ERROR4, e.getMessage()); } } }
/******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.badlogic.gdx.scenes.scene2d.ui; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.g2d.Batch; import com.badlogic.gdx.math.Matrix4; import com.badlogic.gdx.math.Rectangle; import com.badlogic.gdx.math.Vector2; import com.badlogic.gdx.scenes.scene2d.Actor; import com.badlogic.gdx.scenes.scene2d.InputEvent; import com.badlogic.gdx.scenes.scene2d.InputListener; import com.badlogic.gdx.scenes.scene2d.utils.Drawable; import com.badlogic.gdx.scenes.scene2d.utils.Layout; import com.badlogic.gdx.scenes.scene2d.utils.ScissorStack; import com.badlogic.gdx.utils.GdxRuntimeException; /** A container that contains two widgets and is divided either horizontally or vertically. The user may resize the widgets. The * child widgets are always sized to fill their half of the splitpane. * <p> * The preferred size of a splitpane is that of the child widgets and the size of the {@link SplitPaneStyle#handle}. The widgets * are sized depending on the splitpane's size and the {@link #setSplitAmount(float) split position}. 
* @author mzechner * @author Nathan Sweet */ public class SplitPane extends WidgetGroup { SplitPaneStyle style; private Actor firstWidget, secondWidget; boolean vertical; float splitAmount = 0.5f, minAmount, maxAmount = 1; private float oldSplitAmount; private Rectangle firstWidgetBounds = new Rectangle(); private Rectangle secondWidgetBounds = new Rectangle(); Rectangle handleBounds = new Rectangle(); private Rectangle firstScissors = new Rectangle(); private Rectangle secondScissors = new Rectangle(); Vector2 lastPoint = new Vector2(); Vector2 handlePosition = new Vector2(); /** @param firstWidget May be null. * @param secondWidget May be null. */ public SplitPane (Actor firstWidget, Actor secondWidget, boolean vertical, Skin skin) { this(firstWidget, secondWidget, vertical, skin, "default-" + (vertical ? "vertical" : "horizontal")); } /** @param firstWidget May be null. * @param secondWidget May be null. */ public SplitPane (Actor firstWidget, Actor secondWidget, boolean vertical, Skin skin, String styleName) { this(firstWidget, secondWidget, vertical, skin.get(styleName, SplitPaneStyle.class)); } /** @param firstWidget May be null. * @param secondWidget May be null. 
*/ public SplitPane (Actor firstWidget, Actor secondWidget, boolean vertical, SplitPaneStyle style) { this.firstWidget = firstWidget; this.secondWidget = secondWidget; this.vertical = vertical; setStyle(style); setFirstWidget(firstWidget); setSecondWidget(secondWidget); setSize(getPrefWidth(), getPrefHeight()); initialize(); } private void initialize () { addListener(new InputListener() { int draggingPointer = -1; public boolean touchDown (InputEvent event, float x, float y, int pointer, int button) { if (draggingPointer != -1) return false; if (pointer == 0 && button != 0) return false; if (handleBounds.contains(x, y)) { draggingPointer = pointer; lastPoint.set(x, y); handlePosition.set(handleBounds.x, handleBounds.y); return true; } return false; } public void touchUp (InputEvent event, float x, float y, int pointer, int button) { if (pointer == draggingPointer) draggingPointer = -1; } public void touchDragged (InputEvent event, float x, float y, int pointer) { if (pointer != draggingPointer) return; Drawable handle = style.handle; if (!vertical) { float delta = x - lastPoint.x; float availWidth = getWidth() - handle.getMinWidth(); float dragX = handlePosition.x + delta; handlePosition.x = dragX; dragX = Math.max(0, dragX); dragX = Math.min(availWidth, dragX); splitAmount = dragX / availWidth; if (splitAmount < minAmount) splitAmount = minAmount; if (splitAmount > maxAmount) splitAmount = maxAmount; lastPoint.set(x, y); } else { float delta = y - lastPoint.y; float availHeight = getHeight() - handle.getMinHeight(); float dragY = handlePosition.y + delta; handlePosition.y = dragY; dragY = Math.max(0, dragY); dragY = Math.min(availHeight, dragY); splitAmount = 1 - (dragY / availHeight); if (splitAmount < minAmount) splitAmount = minAmount; if (splitAmount > maxAmount) splitAmount = maxAmount; lastPoint.set(x, y); } invalidate(); } }); } public void setStyle (SplitPaneStyle style) { this.style = style; invalidateHierarchy(); } /** Returns the split pane's style. 
Modifying the returned style may not have an effect until {@link #setStyle(SplitPaneStyle)}
 * is called. */
	public SplitPaneStyle getStyle () {
		return style;
	}

	@Override
	public void layout () {
		// Recompute the two child rectangles plus the handle rectangle, then
		// push the results onto the children and validate any Layout children.
		if (!vertical)
			calculateHorizBoundsAndPositions();
		else
			calculateVertBoundsAndPositions();

		Actor firstWidget = this.firstWidget;
		if (firstWidget != null) {
			Rectangle firstWidgetBounds = this.firstWidgetBounds;
			firstWidget.setBounds(firstWidgetBounds.x, firstWidgetBounds.y, firstWidgetBounds.width, firstWidgetBounds.height);
			if (firstWidget instanceof Layout) ((Layout)firstWidget).validate();
		}
		Actor secondWidget = this.secondWidget;
		if (secondWidget != null) {
			Rectangle secondWidgetBounds = this.secondWidgetBounds;
			secondWidget.setBounds(secondWidgetBounds.x, secondWidgetBounds.y, secondWidgetBounds.width, secondWidgetBounds.height);
			if (secondWidget instanceof Layout) ((Layout)secondWidget).validate();
		}
	}

	/** Preferred width is the sum of the children's preferred widths, plus the
	 * handle's min width when the pane is split horizontally. */
	@Override
	public float getPrefWidth () {
		float width = 0;
		if (firstWidget != null)
			width = firstWidget instanceof Layout ? ((Layout)firstWidget).getPrefWidth() : firstWidget.getWidth();
		if (secondWidget != null)
			width += secondWidget instanceof Layout ? ((Layout)secondWidget).getPrefWidth() : secondWidget.getWidth();
		if (!vertical) width += style.handle.getMinWidth();
		return width;
	}

	/** Preferred height is the sum of the children's preferred heights, plus the
	 * handle's min height when the pane is split vertically. */
	@Override
	public float getPrefHeight () {
		float height = 0;
		if (firstWidget != null)
			height = firstWidget instanceof Layout ? ((Layout)firstWidget).getPrefHeight() : firstWidget.getHeight();
		if (secondWidget != null)
			height += secondWidget instanceof Layout ? ((Layout)secondWidget).getPrefHeight() : secondWidget.getHeight();
		if (vertical) height += style.handle.getMinHeight();
		return height;
	}

	public float getMinWidth () {
		return 0;
	}

	public float getMinHeight () {
		return 0;
	}

	public void setVertical (boolean vertical) {
		this.vertical = vertical;
	}

	/** Computes child and handle bounds for a horizontal (side by side) split. */
	private void calculateHorizBoundsAndPositions () {
		Drawable handle = style.handle;
		float height = getHeight();
		float availWidth = getWidth() - handle.getMinWidth();
		// Truncate to an int so the handle falls on a whole pixel.
		float leftAreaWidth = (int)(availWidth * splitAmount);
		float rightAreaWidth = availWidth - leftAreaWidth;
		float handleWidth = handle.getMinWidth();
		firstWidgetBounds.set(0, 0, leftAreaWidth, height);
		secondWidgetBounds.set(leftAreaWidth + handleWidth, 0, rightAreaWidth, height);
		handleBounds.set(leftAreaWidth, 0, handleWidth, height);
	}

	/** Computes child and handle bounds for a vertical (stacked) split; the first
	 * widget occupies the top area. */
	private void calculateVertBoundsAndPositions () {
		Drawable handle = style.handle;
		float width = getWidth();
		float height = getHeight();
		float availHeight = height - handle.getMinHeight();
		float topAreaHeight = (int)(availHeight * splitAmount);
		float bottomAreaHeight = availHeight - topAreaHeight;
		float handleHeight = handle.getMinHeight();
		firstWidgetBounds.set(0, height - topAreaHeight, width, topAreaHeight);
		secondWidgetBounds.set(0, 0, width, bottomAreaHeight);
		handleBounds.set(0, bottomAreaHeight, width, handleHeight);
	}

	@Override
	public void draw (Batch batch, float parentAlpha) {
		validate();
		Color color = getColor();
		Drawable handle = style.handle;
		applyTransform(batch, computeTransform());
		// Each child is clipped to its own rectangle via the scissor stack; the
		// batch must be flushed before popping so clipped geometry is rendered.
		if (firstWidget != null) {
			getStage().calculateScissors(firstWidgetBounds, firstScissors);
			if (ScissorStack.pushScissors(firstScissors)) {
				if (firstWidget.isVisible()) firstWidget.draw(batch, parentAlpha * color.a);
				batch.flush();
				ScissorStack.popScissors();
			}
		}
		if (secondWidget != null) {
			getStage().calculateScissors(secondWidgetBounds, secondScissors);
			if (ScissorStack.pushScissors(secondScissors)) {
				if (secondWidget.isVisible()) secondWidget.draw(batch, parentAlpha * color.a);
				batch.flush();
				ScissorStack.popScissors();
			}
		}
		batch.setColor(color.r, color.g, color.b, color.a);
		handle.draw(batch, handleBounds.x, handleBounds.y, handleBounds.width, handleBounds.height);
		resetTransform(batch);
	}

	/** @param split The split amount between the min and max amount. Clamped to
	 *           [minAmount, maxAmount]. */
	public void setSplitAmount (float split) {
		this.splitAmount = Math.max(Math.min(maxAmount, split), minAmount);
		invalidate();
	}

	public float getSplit () {
		return splitAmount;
	}

	public void setMinSplitAmount (float minAmount) {
		if (minAmount < 0) throw new GdxRuntimeException("minAmount has to be >= 0");
		if (minAmount >= maxAmount) throw new GdxRuntimeException("minAmount has to be < maxAmount");
		this.minAmount = minAmount;
	}

	public void setMaxSplitAmount (float maxAmount) {
		// FIX: the message previously read "maxAmount has to be >= 0", which
		// contradicted the maxAmount > 1 check that triggers it.
		if (maxAmount > 1) throw new GdxRuntimeException("maxAmount has to be <= 1");
		if (maxAmount <= minAmount) throw new GdxRuntimeException("maxAmount has to be > minAmount");
		this.maxAmount = maxAmount;
	}

	/** @param widget May be null. */
	public void setFirstWidget (Actor widget) {
		if (firstWidget != null) super.removeActor(firstWidget);
		firstWidget = widget;
		if (widget != null) super.addActor(widget);
		invalidate();
	}

	/** @param widget May be null. */
	public void setSecondWidget (Actor widget) {
		if (secondWidget != null) super.removeActor(secondWidget);
		secondWidget = widget;
		if (widget != null) super.addActor(widget);
		invalidate();
	}

	// FIX: the messages below previously referenced ScrollPane#setWidget — a
	// copy-paste from ScrollPane. This class's widgets are set with
	// setFirstWidget/setSecondWidget.
	public void addActor (Actor actor) {
		throw new UnsupportedOperationException("Use SplitPane#setFirstWidget or setSecondWidget.");
	}

	public void addActorAt (int index, Actor actor) {
		throw new UnsupportedOperationException("Use SplitPane#setFirstWidget or setSecondWidget.");
	}

	public void addActorBefore (Actor actorBefore, Actor actor) {
		throw new UnsupportedOperationException("Use SplitPane#setFirstWidget or setSecondWidget.");
	}

	public boolean removeActor (Actor actor) {
		throw new UnsupportedOperationException("Use SplitPane#setFirstWidget(null) or setSecondWidget(null).");
	}

	/** The style for a splitpane, see {@link SplitPane}.
	 * @author mzechner
	 * @author Nathan Sweet */
	static public class SplitPaneStyle {
		public Drawable handle;

		public SplitPaneStyle () {
		}

		public SplitPaneStyle (Drawable handle) {
			this.handle = handle;
		}

		public SplitPaneStyle (SplitPaneStyle style) {
			this.handle = style.handle;
		}
	}
}
/* * $Id$ */ /* Copyright (c) 2000-2016 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. 
 */

package org.lockss.state;

import java.util.*;
import org.apache.oro.text.regex.*;
import org.lockss.test.*;
import org.lockss.util.*;
import org.lockss.daemon.*;
import org.lockss.crawler.*;
import org.lockss.plugin.*;
import static org.lockss.state.SubstanceChecker.State;
import static org.lockss.state.SubstanceChecker.NoSubstanceRedirectUrl;
import static org.lockss.state.SubstanceChecker.NoSubstanceRedirectUrl.*;

/** Unit tests for {@link SubstanceChecker}. */
public class TestSubstanceChecker extends LockssTestCase {
  // Permission and start URLs registered on the mock AU in setUp(); the
  // non-substance tests below rely on these never counting as substance.
  List PERMS = ListUtil.list("http://perm/");
  List STARTS = ListUtil.list("http://start/");

  MockArchivalUnit mau;        // mock AU the checker is built against
  SubstanceChecker checker;    // checker under test, rebuilt per scenario

  public void setUp() throws Exception {
    super.setUp();
    mau = new MockArchivalUnit();
    mau.setStartUrls(STARTS);
    mau.setPermissionUrls(PERMS);
  }

  /** The checker should be enabled only for the contexts selected by
   * PARAM_DETECT_NO_SUBSTANCE_MODE, and only when the AU declares substance
   * or non-substance URL patterns. */
  public void testConfig() throws Exception {
    ConfigurationUtil.addFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_MODE, "None");
    checker = new SubstanceChecker(mau);
    assertFalse(checker.isEnabledFor(SubstanceChecker.CONTEXT_CRAWL));
    assertFalse(checker.isEnabledFor(SubstanceChecker.CONTEXT_VOTE));
    ConfigurationUtil.addFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_MODE, "Crawl");
    checker = new SubstanceChecker(mau);
    // Crawl mode alone isn't enough: the AU has no patterns yet.
    assertFalse(checker.isEnabledFor(SubstanceChecker.CONTEXT_CRAWL));
    assertFalse(checker.isEnabledFor(SubstanceChecker.CONTEXT_VOTE));
    mau.setSubstanceUrlPatterns(RegexpUtil.compileRegexps(ListUtil.list("x")));
    checker = new SubstanceChecker(mau);
    assertTrue(checker.isEnabledFor(SubstanceChecker.CONTEXT_CRAWL));
    assertFalse(checker.isEnabledFor(SubstanceChecker.CONTEXT_VOTE));
    mau.setSubstanceUrlPatterns(null);
    checker = new SubstanceChecker(mau);
    assertFalse(checker.isEnabledFor(SubstanceChecker.CONTEXT_CRAWL));
    // Non-substance patterns also enable the checker.
    mau.setNonSubstanceUrlPatterns(RegexpUtil.compileRegexps(ListUtil.list("x")));
    checker = new SubstanceChecker(mau);
    assertTrue(checker.isEnabledFor(SubstanceChecker.CONTEXT_CRAWL));
    ConfigurationUtil.addFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_MODE, "Crawl");
    checker = new SubstanceChecker(mau);
    assertTrue(checker.isEnabledFor(SubstanceChecker.CONTEXT_CRAWL));
    assertFalse(checker.isEnabledFor(SubstanceChecker.CONTEXT_VOTE));
    ConfigurationUtil.addFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_MODE, "Vote");
    checker = new SubstanceChecker(mau);
    assertFalse(checker.isEnabledFor(SubstanceChecker.CONTEXT_CRAWL));
    assertTrue(checker.isEnabledFor(SubstanceChecker.CONTEXT_VOTE));
    ConfigurationUtil.addFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_MODE, "All");
    checker = new SubstanceChecker(mau);
    assertTrue(checker.isEnabledFor(SubstanceChecker.CONTEXT_CRAWL));
    assertTrue(checker.isEnabledFor(SubstanceChecker.CONTEXT_VOTE));
  }

  /** With no patterns at all the substance state is Unknown. */
  public void testNoPatterns() {
    checker = new SubstanceChecker(mau);
    assertEquals(State.Unknown, checker.hasSubstance());
  }

  /** A URL matching a substance pattern flips the state to Yes. */
  public void testSubst() throws Exception {
    mau.setSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "two")));
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.hasSubstance());
    check("http://four/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://two/");
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** With non-substance patterns, only a URL matching none of the patterns
   * (and that isn't a start or permission URL) counts as substance. */
  public void testNonSubst() throws Exception {
    mau.setNonSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "two")));
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.hasSubstance());
    check("http://two/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://start/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://perm/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://other/");
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** findSubstance() over an AU with no matching URL reports No and leaves
   * no CachedUrl open. */
  public void testFindSubstNo() throws Exception {
    mau.setSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "two")));
    List<MockCachedUrl> mcus = new ArrayList<MockCachedUrl>();
    mcus.add(mau.addUrl("http://four/", false, true));
    mcus.add(mau.addUrl("http://three/", false, true));
    mau.populateAuCachedUrlSet();
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.findSubstance());
    assertEquals(State.No, checker.hasSubstance());
    for (MockCachedUrl mcu : mcus) {
      assertFalse("CU left open: " + mcu, mcu.isOpen());
    }
  }

  /** findSubstance() over an AU containing a matching URL reports Yes and
   * leaves no CachedUrl open. */
  public void testFindSubstYes() throws Exception {
    mau.setSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "two")));
    List<MockCachedUrl> mcus = new ArrayList<MockCachedUrl>();
    mcus.add(mau.addUrl("http://four/", false, true));
    mcus.add(mau.addUrl("http://two/", false, true));
    mcus.add(mau.addUrl("http://three/", false, true));
    mau.populateAuCachedUrlSet();
    checker = new SubstanceChecker(mau);
    assertEquals(State.Yes, checker.findSubstance());
    assertEquals(State.Yes, checker.hasSubstance());
    for (MockCachedUrl mcu : mcus) {
      assertFalse("CU left open: " + mcu, mcu.isOpen());
    }
  }

  /** The substance counter increments per matching URL and stops at the
   * configured minimum. */
  public void testCountSubst() throws Exception {
    mau.setSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "two")));
    checker = new SubstanceChecker(mau);
    checker.setSubstanceMin(3);
    assertEquals(State.No, checker.hasSubstance());
    assertEquals(0, checker.getSubstanceCnt());
    check("http://four/");
    assertEquals(State.No, checker.hasSubstance());
    assertEquals(0, checker.getSubstanceCnt());
    check("http://two/");
    assertEquals(State.Yes, checker.hasSubstance());
    assertEquals(1, checker.getSubstanceCnt());
    check("http://one/");
    assertEquals(State.Yes, checker.hasSubstance());
    assertEquals(2, checker.getSubstanceCnt());
    check("http://one/");
    assertEquals(State.Yes, checker.hasSubstance());
    assertEquals(3, checker.getSubstanceCnt());
    // should stop testing URL once reaches 3
    check("http://one/");
    assertEquals(State.Yes, checker.hasSubstance());
    assertEquals(3, checker.getSubstanceCnt());
  }

  /** A plugin-supplied SubstancePredicate overrides pattern matching. */
  public void testPluginPredicate() throws Exception {
    mau.setSubstancePredicate(new SubstancePredicate() {
	public boolean isSubstanceUrl(String url) {
	  return url.indexOf("yes") >= 0;
	}});
    checker = new SubstanceChecker(mau);
    assertTrue(checker.isSubstanceUrl("xxyesxx"));
    assertFalse(checker.isSubstanceUrl("xxnoxx"));
    assertEquals(State.No, checker.hasSubstance());
    check("http://four/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://yes/");
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** Default redirect mode (Last): only the final URL of a redirect chain is
   * matched against the substance patterns. */
  public void testRedirSubstLast() throws Exception {
    mau.setSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "redd")));
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.hasSubstance());
    check("http://four/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://two/", "http://reddd/", Last);
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** First mode: only the original URL of a redirect chain is matched. */
  public void testRedirSubstFirst() throws Exception {
    ConfigurationUtil.setFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_REDIRECT_URL, "First");
    mau.setSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "redd")));
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.hasSubstance());
    check("http://four/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://two/", "http://reddd/", First);
    assertEquals(State.No, checker.hasSubstance());
    check("http://reddd/", "http://three/", First);
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** All mode: any URL anywhere in the redirect chain may match. */
  public void testRedirSubstAll() throws Exception {
    ConfigurationUtil.setFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_REDIRECT_URL, "All");
    mau.setSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "redd")));
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.hasSubstance());
    check("http://four/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://two/", "http://xxx/", All);
    assertEquals(State.No, checker.hasSubstance());
    check(ListUtil.list("http://frob/", "http://reddd/", "http://three/"), All);
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** All mode: a match on the first URL of the chain also counts. */
  public void testRedirSubstAll2() throws Exception {
    ConfigurationUtil.setFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_REDIRECT_URL, "All");
    mau.setSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "redd")));
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.hasSubstance());
    check("http://four/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://one/", "http://xxx/", All);
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** Last mode with non-substance patterns: the final URL must miss every
   * pattern for the chain to count as substance. */
  public void testRedirNonSubstLast() throws Exception {
    mau.setNonSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "redd")));
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.hasSubstance());
    check("http://one/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://two/", "http://one/", Last);
    assertEquals(State.No, checker.hasSubstance());
    check("http://two/", "http://six/", Last);
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** First mode with non-substance patterns: only the original URL is
   * checked against the patterns. */
  public void testRedirNonSubstFirst() throws Exception {
    ConfigurationUtil.setFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_REDIRECT_URL, "First");
    mau.setNonSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "redd")));
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.hasSubstance());
    check("http://one/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://redd/", "http://two/", First);
    assertEquals(State.No, checker.hasSubstance());
    check("http://three/", "http://one/", First);
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** All mode with non-substance patterns: every URL in the chain must miss
   * every pattern for the chain to count as substance. */
  public void testRedirNonSubstAll() throws Exception {
    ConfigurationUtil.setFromArgs(SubstanceChecker.PARAM_DETECT_NO_SUBSTANCE_REDIRECT_URL, "All");
    mau.setNonSubstanceUrlPatterns(compileRegexps(ListUtil.list("one", "redd", "green")));
    checker = new SubstanceChecker(mau);
    assertEquals(State.No, checker.hasSubstance());
    check("http://one/");
    assertEquals(State.No, checker.hasSubstance());
    check("http://redd/", "http://green/", All);
    assertEquals(State.No, checker.hasSubstance());
    check("http://one/", "http://redddd/", All);
    assertEquals(State.No, checker.hasSubstance());
    check(ListUtil.list("http://one/", "http://redd/", "http://green/"), All);
    assertEquals(State.No, checker.hasSubstance());
    check(ListUtil.list("http://twu/", "http://splortch/", "http://green/"), All);
    assertEquals(State.Yes, checker.hasSubstance());
  }

  /** Feeds a single (non-redirected) URL to the checker, first verifying the
   * URL set the checker intends to examine. */
  void check(String url) {
    MockCachedUrl mcu = new MockCachedUrl(url);
    assertEquals(ListUtil.list(url), checker.getUrlsToCheck(mcu));
    checker.checkSubstance(mcu);
  }

  /** Feeds a one-hop redirect (url -> redirTo) to the checker, asserting that
   * getUrlsToCheck() returns the URL set implied by the given mode. */
  void check(String url, String redirTo, NoSubstanceRedirectUrl mode) {
    MockCachedUrl first = mau.addUrl(url);
    CIProperties props = new CIProperties();
    props.put(CachedUrl.PROPERTY_REDIRECTED_TO, redirTo);
    props.put(CachedUrl.PROPERTY_CONTENT_URL, redirTo);
    first.setProperties(props);
    switch (mode) {
    case First:
      assertEquals(ListUtil.list(url), checker.getUrlsToCheck(first));
      break;
    case Last:
      assertEquals(ListUtil.list(redirTo), checker.getUrlsToCheck(first));
      break;
    case All:
      assertEquals(ListUtil.list(url, redirTo), checker.getUrlsToCheck(first));
      break;
    }
    checker.checkSubstance(first);
  }

  /** Feeds a multi-hop redirect chain to the checker, building the
   * intermediate CachedUrls with redirect properties, then asserts that no
   * non-initial CU is left open afterward. */
  void check(List<String> urls, NoSubstanceRedirectUrl mode) {
    List<String> remUrls = new ArrayList(urls);
    String first = remUrls.remove(0);
    String last = urls.get(urls.size() - 1);
    MockCachedUrl firstCu = mau.addUrl(first);
    List<MockCachedUrl> mcus = new ArrayList<MockCachedUrl>();
    mcus.add(firstCu);
    MockCachedUrl mcu = firstCu;
    for (String url : remUrls) {
      CIProperties props = new CIProperties();
      props.put(CachedUrl.PROPERTY_REDIRECTED_TO, url);
      props.put(CachedUrl.PROPERTY_CONTENT_URL, last);
      mcu.setProperties(props);
      // NOTE(review): dead store — props is never read after this and is
      // rebuilt at the top of the next iteration; likely leftover code.
      props = new CIProperties();
      mcu = mau.addUrl(url);
      mcus.add(mcu);
    }
    switch (mode) {
    case First:
      assertEquals(ListUtil.list(first), checker.getUrlsToCheck(firstCu));
      break;
    case Last:
      assertEquals(ListUtil.list(last), checker.getUrlsToCheck(firstCu));
      break;
    case All:
      assertEquals(urls, checker.getUrlsToCheck(firstCu));
      break;
    }
    checker.checkSubstance(firstCu);
    for (MockCachedUrl amcu : mcus) {
      if (amcu != firstCu) {
	assertFalse("CU left open: " + amcu, amcu.isOpen());
      };
    }
  }

  /** Convenience wrapper around RegexpUtil.compileRegexps(). */
  List<Pattern> compileRegexps(List<String> regexps)
      throws MalformedPatternException {
    return RegexpUtil.compileRegexps(regexps);
  }
}
/* * Copyright 2014-2015 Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.routing.bgp; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.SimpleChannelHandler; import org.jboss.netty.util.HashedWheelTimer; import org.jboss.netty.util.Timeout; import org.jboss.netty.util.Timer; import org.jboss.netty.util.TimerTask; import org.onlab.packet.Ip4Address; import org.onlab.packet.Ip4Prefix; import org.onlab.packet.Ip6Prefix; import org.onlab.packet.IpPrefix; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.net.InetAddress; import java.net.InetSocketAddress; import java.util.Collection; import java.util.Collections; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; /** * Class for handling the BGP peer sessions. * There is one instance per each BGP peer session. */ public class BgpSession extends SimpleChannelHandler { private static final Logger log = LoggerFactory.getLogger(BgpSession.class); private final BgpSessionManager bgpSessionManager; // Local flag to indicate the session is closed. // It is used to avoid the Netty's asynchronous closing of a channel. 
private boolean isClosed = false; // BGP session info: local and remote private final BgpSessionInfo localInfo; // BGP session local info private final BgpSessionInfo remoteInfo; // BGP session remote info // Timers state private Timer timer = new HashedWheelTimer(); private volatile Timeout keepaliveTimeout; // Periodic KEEPALIVE private volatile Timeout sessionTimeout; // Session timeout // BGP RIB-IN routing entries from this peer private ConcurrentMap<Ip4Prefix, BgpRouteEntry> bgpRibIn4 = new ConcurrentHashMap<>(); private ConcurrentMap<Ip6Prefix, BgpRouteEntry> bgpRibIn6 = new ConcurrentHashMap<>(); /** * Constructor for a given BGP Session Manager. * * @param bgpSessionManager the BGP Session Manager to use */ BgpSession(BgpSessionManager bgpSessionManager) { this.bgpSessionManager = bgpSessionManager; this.localInfo = new BgpSessionInfo(); this.remoteInfo = new BgpSessionInfo(); // NOTE: We support only BGP4 this.localInfo.setBgpVersion(BgpConstants.BGP_VERSION); } /** * Gets the BGP Session Manager. * * @return the BGP Session Manager */ BgpSessionManager getBgpSessionManager() { return bgpSessionManager; } /** * Gets the BGP Session local information. * * @return the BGP Session local information. */ public BgpSessionInfo localInfo() { return localInfo; } /** * Gets the BGP Session remote information. * * @return the BGP Session remote information. */ public BgpSessionInfo remoteInfo() { return remoteInfo; } /** * Gets the BGP Multiprotocol Extensions for the session. * * @return true if the BGP Multiprotocol Extensions are enabled for the * session, otherwise false */ public boolean mpExtensions() { return remoteInfo.mpExtensions() && localInfo.mpExtensions(); } /** * Gets the BGP session 4 octet AS path capability. * * @return true when the BGP session is 4 octet AS path capable */ public boolean isAs4OctetCapable() { return remoteInfo.as4OctetCapability() && localInfo.as4OctetCapability(); } /** * Gets the IPv4 BGP RIB-IN routing entries. 
* * @return the IPv4 BGP RIB-IN routing entries */ public Collection<BgpRouteEntry> getBgpRibIn4() { return bgpRibIn4.values(); } /** * Gets the IPv6 BGP RIB-IN routing entries. * * @return the IPv6 BGP RIB-IN routing entries */ public Collection<BgpRouteEntry> getBgpRibIn6() { return bgpRibIn6.values(); } /** * Finds an IPv4 BGP routing entry for a prefix in the IPv4 BGP RIB-IN. * * @param prefix the IPv4 prefix of the route to search for * @return the IPv4 BGP routing entry if found, otherwise null */ public BgpRouteEntry findBgpRoute(Ip4Prefix prefix) { return bgpRibIn4.get(prefix); } /** * Finds an IPv6 BGP routing entry for a prefix in the IPv6 BGP RIB-IN. * * @param prefix the IPv6 prefix of the route to search for * @return the IPv6 BGP routing entry if found, otherwise null */ public BgpRouteEntry findBgpRoute(Ip6Prefix prefix) { return bgpRibIn6.get(prefix); } /** * Finds a BGP routing entry for a prefix in the BGP RIB-IN. The prefix * can be either IPv4 or IPv6. * * @param prefix the IP prefix of the route to search for * @return the BGP routing entry if found, otherwise null */ public BgpRouteEntry findBgpRoute(IpPrefix prefix) { if (prefix.isIp4()) { // IPv4 prefix Ip4Prefix ip4Prefix = prefix.getIp4Prefix(); return bgpRibIn4.get(ip4Prefix); } // IPv6 prefix Ip6Prefix ip6Prefix = prefix.getIp6Prefix(); return bgpRibIn6.get(ip6Prefix); } /** * Adds a BGP route. The route can be either IPv4 or IPv6. * * @param bgpRouteEntry the BGP route entry to use */ void addBgpRoute(BgpRouteEntry bgpRouteEntry) { if (bgpRouteEntry.isIp4()) { // IPv4 route Ip4Prefix ip4Prefix = bgpRouteEntry.prefix().getIp4Prefix(); bgpRibIn4.put(ip4Prefix, bgpRouteEntry); } else { // IPv6 route Ip6Prefix ip6Prefix = bgpRouteEntry.prefix().getIp6Prefix(); bgpRibIn6.put(ip6Prefix, bgpRouteEntry); } } /** * Removes an IPv4 BGP route for a prefix. 
* * @param prefix the prefix to use * @return true if the route was found and removed, otherwise false */ boolean removeBgpRoute(Ip4Prefix prefix) { return (bgpRibIn4.remove(prefix) != null); } /** * Removes an IPv6 BGP route for a prefix. * * @param prefix the prefix to use * @return true if the route was found and removed, otherwise false */ boolean removeBgpRoute(Ip6Prefix prefix) { return (bgpRibIn6.remove(prefix) != null); } /** * Removes a BGP route for a prefix. The prefix can be either IPv4 or IPv6. * * @param prefix the prefix to use * @return true if the route was found and removed, otherwise false */ boolean removeBgpRoute(IpPrefix prefix) { if (prefix.isIp4()) { return (bgpRibIn4.remove(prefix.getIp4Prefix()) != null); // IPv4 } return (bgpRibIn6.remove(prefix.getIp6Prefix()) != null); // IPv6 } /** * Tests whether the session is closed. * <p> * NOTE: We use this method to avoid the Netty's asynchronous closing * of a channel. * </p> * @return true if the session is closed */ boolean isClosed() { return isClosed; } /** * Closes the session. * * @param ctx the Channel Handler Context */ void closeSession(ChannelHandlerContext ctx) { timer.stop(); closeChannel(ctx); } /** * Closes the Netty channel. 
* * @param ctx the Channel Handler Context */ void closeChannel(ChannelHandlerContext ctx) { isClosed = true; ctx.getChannel().close(); } @Override public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent channelEvent) { bgpSessionManager.addSessionChannel(channelEvent.getChannel()); } @Override public void channelClosed(ChannelHandlerContext ctx, ChannelStateEvent channelEvent) { bgpSessionManager.removeSessionChannel(channelEvent.getChannel()); } @Override public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent channelEvent) { localInfo.setAddress(ctx.getChannel().getLocalAddress()); remoteInfo.setAddress(ctx.getChannel().getRemoteAddress()); // Assign the local and remote IPv4 addresses InetAddress inetAddr; if (localInfo.address() instanceof InetSocketAddress) { inetAddr = ((InetSocketAddress) localInfo.address()).getAddress(); localInfo.setIp4Address(Ip4Address.valueOf(inetAddr.getAddress())); } if (remoteInfo.address() instanceof InetSocketAddress) { inetAddr = ((InetSocketAddress) remoteInfo.address()).getAddress(); remoteInfo.setIp4Address(Ip4Address.valueOf(inetAddr.getAddress())); } log.debug("BGP Session Connected from {} on {}", remoteInfo.address(), localInfo.address()); if (!bgpSessionManager.peerConnected(this)) { log.debug("Cannot setup BGP Session Connection from {}. 
Closing...", remoteInfo.address()); ctx.getChannel().close(); } // // Assign the local BGP ID // NOTE: This should be configuration-based // localInfo.setBgpId(bgpSessionManager.getMyBgpId()); } @Override public void channelDisconnected(ChannelHandlerContext ctx, ChannelStateEvent channelEvent) { log.debug("BGP Session Disconnected from {} on {}", ctx.getChannel().getRemoteAddress(), ctx.getChannel().getLocalAddress()); processChannelDisconnected(); } @Override public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) { log.debug("BGP Session Exception Caught from {} on {}: {}", ctx.getChannel().getRemoteAddress(), ctx.getChannel().getLocalAddress(), e); processChannelDisconnected(); } /** * Processes the channel being disconnected. */ private void processChannelDisconnected() { // // Withdraw the routes advertised by this BGP peer // // NOTE: We must initialize the RIB-IN before propagating the withdraws // for further processing. Otherwise, the BGP Decision Process // will use those routes again. // Collection<BgpRouteEntry> deletedRoutes4 = bgpRibIn4.values(); Collection<BgpRouteEntry> deletedRoutes6 = bgpRibIn6.values(); bgpRibIn4 = new ConcurrentHashMap<>(); bgpRibIn6 = new ConcurrentHashMap<>(); // Push the updates to the BGP Merged RIB BgpRouteSelector bgpRouteSelector = bgpSessionManager.getBgpRouteSelector(); Collection<BgpRouteEntry> addedRoutes = Collections.emptyList(); bgpRouteSelector.routeUpdates(this, addedRoutes, deletedRoutes4); bgpRouteSelector.routeUpdates(this, addedRoutes, deletedRoutes6); bgpSessionManager.peerDisconnected(this); } /** * Restarts the BGP KeepaliveTimer. 
* * @param ctx the Channel Handler Context to use */ void restartKeepaliveTimer(ChannelHandlerContext ctx) { long localKeepaliveInterval = 0; // // Compute the local Keepalive interval // if (localInfo.holdtime() != 0) { localKeepaliveInterval = Math.max(localInfo.holdtime() / BgpConstants.BGP_KEEPALIVE_PER_HOLD_INTERVAL, BgpConstants.BGP_KEEPALIVE_MIN_INTERVAL); } // Restart the Keepalive timer if (localKeepaliveInterval == 0) { return; // Nothing to do } keepaliveTimeout = timer.newTimeout(new TransmitKeepaliveTask(ctx), localKeepaliveInterval, TimeUnit.SECONDS); } /** * Task class for transmitting KEEPALIVE messages. */ private final class TransmitKeepaliveTask implements TimerTask { private final ChannelHandlerContext ctx; /** * Constructor for given Channel Handler Context. * * @param ctx the Channel Handler Context to use */ TransmitKeepaliveTask(ChannelHandlerContext ctx) { this.ctx = ctx; } @Override public void run(Timeout timeout) throws Exception { if (timeout.isCancelled()) { return; } if (!ctx.getChannel().isOpen()) { return; } // Transmit the KEEPALIVE ChannelBuffer txMessage = BgpKeepalive.prepareBgpKeepalive(); ctx.getChannel().write(txMessage); // Restart the KEEPALIVE timer restartKeepaliveTimer(ctx); } } /** * Restarts the BGP Session Timeout Timer. * * @param ctx the Channel Handler Context to use */ void restartSessionTimeoutTimer(ChannelHandlerContext ctx) { if (remoteInfo.holdtime() == 0) { return; // Nothing to do } if (sessionTimeout != null) { sessionTimeout.cancel(); } sessionTimeout = timer.newTimeout(new SessionTimeoutTask(ctx), remoteInfo.holdtime(), TimeUnit.SECONDS); } /** * Task class for BGP Session timeout. */ private final class SessionTimeoutTask implements TimerTask { private final ChannelHandlerContext ctx; /** * Constructor for given Channel Handler Context. 
* * @param ctx the Channel Handler Context to use */ SessionTimeoutTask(ChannelHandlerContext ctx) { this.ctx = ctx; } @Override public void run(Timeout timeout) throws Exception { if (timeout.isCancelled()) { return; } if (!ctx.getChannel().isOpen()) { return; } log.debug("BGP Session Timeout: peer {}", remoteInfo.address()); // // ERROR: Invalid Optional Parameter Length field: Unspecific // // Send NOTIFICATION and close the connection int errorCode = BgpConstants.Notifications.HoldTimerExpired.ERROR_CODE; int errorSubcode = BgpConstants.Notifications.ERROR_SUBCODE_UNSPECIFIC; ChannelBuffer txMessage = BgpNotification.prepareBgpNotification(errorCode, errorSubcode, null); ctx.getChannel().write(txMessage); closeChannel(ctx); } } }
/** * Copyright 2011 multibit.org * * Licensed under the MIT license (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://opensource.org/licenses/mit-license.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.multibit.viewsystem.swing.view.panels; import com.google.bitcoin.core.Sha256Hash; import com.google.bitcoin.core.Transaction; import com.google.bitcoin.core.Utils; import com.google.bitcoin.core.Wallet.SendRequest; import org.bitcoinj.wallet.Protos.Wallet.EncryptionType; import org.multibit.MultiBit; import org.multibit.controller.Controller; import org.multibit.controller.bitcoin.BitcoinController; import org.multibit.exchange.CurrencyConverter; import org.multibit.model.bitcoin.BitcoinModel; import org.multibit.model.bitcoin.WalletBusyListener; import org.multibit.utils.ImageLoader; import org.multibit.viewsystem.swing.ColorAndFontConstants; import org.multibit.viewsystem.swing.MultiBitFrame; import org.multibit.viewsystem.swing.action.CancelBackToParentAction; import org.multibit.viewsystem.swing.action.OkBackToParentAction; import org.multibit.viewsystem.swing.action.SendBitcoinNowAction; import org.multibit.viewsystem.swing.view.components.MultiBitButton; import org.multibit.viewsystem.swing.view.components.MultiBitDialog; import org.multibit.viewsystem.swing.view.components.MultiBitLabel; import org.multibit.viewsystem.swing.view.components.MultiBitTitledPanel; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.swing.*; import java.awt.*; /** * The send bitcoin confirm panel. 
*/ public class SendBitcoinConfirmPanel extends JPanel implements WalletBusyListener { private static final long serialVersionUID = 191435612399957705L; private static final Logger log = LoggerFactory.getLogger(SendBitcoinConfirmPanel.class); private static final int STENT_WIDTH = 10; private MultiBitFrame mainFrame; private MultiBitDialog sendBitcoinConfirmDialog; private final Controller controller; private final BitcoinController bitcoinController; private MultiBitLabel sendAddressText; private MultiBitLabel sendLabelText; private MultiBitLabel sendAmountText; private MultiBitLabel sendFeeText; private String sendAddress; private String sendLabel; private SendRequest sendRequest; private MultiBitLabel confirmText1; private MultiBitLabel confirmText2; private SendBitcoinNowAction sendBitcoinNowAction; private MultiBitButton sendButton; private MultiBitButton cancelButton; private JPasswordField walletPasswordField; private MultiBitLabel walletPasswordPromptLabel; private MultiBitLabel explainLabel; private static SendBitcoinConfirmPanel thisPanel = null; private static ImageIcon shapeTriangleIcon; private static ImageIcon shapeSquareIcon; private static ImageIcon shapeHeptagonIcon; private static ImageIcon shapeHexagonIcon; private static ImageIcon progress0Icon; static { shapeTriangleIcon = ImageLoader.createImageIcon(ImageLoader.SHAPE_TRIANGLE_ICON_FILE); shapeSquareIcon = ImageLoader.createImageIcon(ImageLoader.SHAPE_SQUARE_ICON_FILE); shapeHeptagonIcon = ImageLoader.createImageIcon(ImageLoader.SHAPE_PENTAGON_ICON_FILE); shapeHexagonIcon = ImageLoader.createImageIcon(ImageLoader.SHAPE_HEXAGON_ICON_FILE); progress0Icon = ImageLoader.createImageIcon(ShowTransactionsPanel.PROGRESS_0_ICON_FILE); } /** * Creates a new {@link SendBitcoinConfirmPanel}. 
*/ public SendBitcoinConfirmPanel(BitcoinController bitcoinController, MultiBitFrame mainFrame, MultiBitDialog sendBitcoinConfirmDialog, SendRequest sendRequest) { super(); this.bitcoinController = bitcoinController; this.controller = this.bitcoinController; this.mainFrame = mainFrame; this.sendBitcoinConfirmDialog = sendBitcoinConfirmDialog; this.sendRequest = sendRequest; thisPanel = this; initUI(); cancelButton.requestFocusInWindow(); applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())); this.bitcoinController.registerWalletBusyListener(this); } /** * Initialise bitcoin confirm panel. */ public void initUI() { JPanel mainPanel = new JPanel(); mainPanel.setOpaque(false); setLayout(new BorderLayout()); add(mainPanel, BorderLayout.CENTER); mainPanel.setLayout(new GridBagLayout()); String[] keys = new String[] { "sendBitcoinPanel.addressLabel", "sendBitcoinPanel.labelLabel", "sendBitcoinPanel.amountLabel", "showPreferencesPanel.feeLabel.text", "showExportPrivateKeysPanel.walletPasswordPrompt"}; int stentWidth = MultiBitTitledPanel.calculateStentWidthForKeys(controller.getLocaliser(), keys, mainPanel) + ExportPrivateKeysPanel.STENT_DELTA; // Get the data out of the wallet preferences. sendAddress = this.bitcoinController.getModel().getActiveWalletPreference(BitcoinModel.SEND_ADDRESS); sendLabel = this.bitcoinController.getModel().getActiveWalletPreference(BitcoinModel.SEND_LABEL); String sendAmount = this.bitcoinController.getModel().getActiveWalletPreference(BitcoinModel.SEND_AMOUNT) + " " + controller.getLocaliser(). 
getString("sendBitcoinPanel.amountUnitLabel"); String sendAmountLocalised = CurrencyConverter.INSTANCE.prettyPrint(sendAmount); String fee = "0"; if (sendRequest != null) { fee = Utils.bitcoinValueToPlainString(sendRequest.fee); } String sendFeeLocalised = CurrencyConverter.INSTANCE.prettyPrint(fee); GridBagConstraints constraints = new GridBagConstraints(); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 0; constraints.gridy = 0; constraints.weightx = 0.3; constraints.weighty = 0.1; constraints.gridwidth = 1; constraints.gridheight = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(MultiBitTitledPanel.createStent(STENT_WIDTH), constraints); ImageIcon bigIcon = ImageLoader.createImageIcon(ImageLoader.MULTIBIT_128_ICON_FILE); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 1; constraints.gridy = 2; constraints.weightx = 0.5; constraints.weighty = 0.2; constraints.gridwidth = 1; constraints.gridheight = 5; constraints.anchor = GridBagConstraints.CENTER; JLabel bigIconLabel = new JLabel(bigIcon); mainPanel.add(bigIconLabel, constraints); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 2; constraints.gridy = 0; constraints.weightx = 0.3; constraints.weighty = 0.1; constraints.gridwidth = 1; constraints.gridheight = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(MultiBitTitledPanel.createStent(STENT_WIDTH, STENT_WIDTH), constraints); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 7; constraints.gridy = 1; constraints.weightx = 0.3; constraints.weighty = 0.1; constraints.gridwidth = 1; constraints.gridheight = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(MultiBitTitledPanel.createStent(STENT_WIDTH), constraints); explainLabel = new MultiBitLabel(""); explainLabel.setText(controller.getLocaliser().getString("sendBitcoinConfirmView.message")); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 3; constraints.gridy = 1; 
constraints.weightx = 0.8; constraints.weighty = 0.4; constraints.gridwidth = 5; constraints.gridheight = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(explainLabel, constraints); mainPanel.add(MultiBitTitledPanel.createStent(explainLabel.getPreferredSize().width, explainLabel.getPreferredSize().height), constraints); JPanel detailPanel = new JPanel(new GridBagLayout()); detailPanel.setBackground(ColorAndFontConstants.VERY_LIGHT_BACKGROUND_COLOR); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 3; constraints.gridy = 2; constraints.weightx = 0.6; constraints.weighty = 0.8; constraints.gridwidth = 3; constraints.gridheight = 5; constraints.anchor = GridBagConstraints.CENTER; mainPanel.add(detailPanel, constraints); GridBagConstraints constraints2 = new GridBagConstraints(); constraints2.fill = GridBagConstraints.HORIZONTAL; constraints2.gridx = 0; constraints2.gridy = 0; constraints2.weightx = 0.3; constraints2.weighty = 0.05; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_START; detailPanel.add(MultiBitTitledPanel.createStent(stentWidth), constraints2); constraints2.fill = GridBagConstraints.HORIZONTAL; constraints2.gridx = 1; constraints2.gridy = 0; constraints2.weightx = 0.05; constraints2.weighty = 0.05; constraints2.gridwidth = 1; constraints2.gridheight = 1; constraints2.anchor = GridBagConstraints.CENTER; detailPanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS), constraints2); JLabel forcer1 = new JLabel(); forcer1.setOpaque(false); constraints2.fill = GridBagConstraints.HORIZONTAL; constraints2.gridx = 2; constraints2.gridy = 0; constraints2.weightx = 10; constraints2.weighty = 0.05; constraints2.gridwidth = 1; constraints2.gridheight = 1; constraints2.anchor = GridBagConstraints.LINE_END; detailPanel.add(forcer1, constraints2); MultiBitLabel sendAddressLabel = new MultiBitLabel(""); 
sendAddressLabel.setText(controller.getLocaliser().getString("sendBitcoinPanel.addressLabel")); constraints2.fill = GridBagConstraints.NONE; constraints2.gridx = 0; constraints2.gridy = 1; constraints2.weightx = 0.3; constraints2.weighty = 0.1; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_END; detailPanel.add(sendAddressLabel, constraints2); sendAddressText = new MultiBitLabel(""); sendAddressText.setText(sendAddress); constraints2.fill = GridBagConstraints.NONE; constraints2.gridx = 2; constraints2.gridy = 1; constraints2.weightx = 0.3; constraints2.weighty = 0.1; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_START; detailPanel.add(sendAddressText, constraints2); MultiBitLabel sendLabelLabel = new MultiBitLabel(""); sendLabelLabel.setText(controller.getLocaliser().getString("sendBitcoinPanel.labelLabel")); constraints2.fill = GridBagConstraints.NONE; constraints2.gridx = 0; constraints2.gridy = 2; constraints2.weightx = 0.3; constraints2.weighty = 0.1; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_END; detailPanel.add(sendLabelLabel, constraints2); sendLabelText = new MultiBitLabel(""); sendLabelText.setText(sendLabel); constraints2.fill = GridBagConstraints.NONE; constraints2.gridx = 2; constraints2.gridy = 2; constraints2.weightx = 0.3; constraints2.weighty = 0.1; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_START; detailPanel.add(sendLabelText, constraints2); MultiBitLabel sendAmountLabel = new MultiBitLabel(""); sendAmountLabel.setText(controller.getLocaliser().getString("sendBitcoinPanel.amountLabel")); constraints2.fill = GridBagConstraints.NONE; constraints2.gridx = 0; constraints2.gridy = 3; constraints2.weightx = 0.3; constraints2.weighty = 0.1; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_END; detailPanel.add(sendAmountLabel, constraints2); sendAmountText = new MultiBitLabel(""); 
sendAmountText.setText(sendAmountLocalised); constraints2.fill = GridBagConstraints.NONE; constraints2.gridx = 2; constraints2.gridy = 3; constraints2.weightx = 0.3; constraints2.weighty = 0.1; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_START; detailPanel.add(sendAmountText, constraints2); MultiBitLabel sendFeeLabel = new MultiBitLabel(""); sendFeeLabel.setText(controller.getLocaliser().getString("showPreferencesPanel.feeLabel.text")); constraints2.fill = GridBagConstraints.NONE; constraints2.gridx = 0; constraints2.gridy = 4; constraints2.weightx = 0.3; constraints2.weighty = 0.1; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_END; detailPanel.add(sendFeeLabel, constraints2); sendFeeText = new MultiBitLabel(""); sendFeeText.setText(sendFeeLocalised); constraints2.fill = GridBagConstraints.NONE; constraints2.gridx = 2; constraints2.gridy = 4; constraints2.weightx = 0.3; constraints2.weighty = 0.1; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_START; detailPanel.add(sendFeeText, constraints2); constraints2.fill = GridBagConstraints.HORIZONTAL; constraints2.gridx = 0; constraints2.gridy = 5; constraints2.weightx = 0.3; constraints2.weighty = 0.05; constraints2.gridwidth = 1; constraints2.anchor = GridBagConstraints.LINE_START; detailPanel.add(MultiBitTitledPanel.createStent(stentWidth), constraints2); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 3; constraints.gridy = 7; constraints.weightx = 0.3; constraints.weighty = 0.3; constraints.gridheight = 1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(MultiBitTitledPanel.createStent(stentWidth), constraints); // Add wallet password field. 
walletPasswordPromptLabel = new MultiBitLabel(controller.getLocaliser().getString("showExportPrivateKeysPanel.walletPasswordPrompt")); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 3; constraints.gridy = 8; constraints.weightx = 0.3; constraints.weighty = 0.1; constraints.gridheight = 1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_END; mainPanel.add(walletPasswordPromptLabel, constraints); mainPanel.add(MultiBitTitledPanel.createStent(walletPasswordPromptLabel.getPreferredSize().width, walletPasswordPromptLabel.getPreferredSize().height), constraints); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 4; constraints.gridy = 7; constraints.weightx = 0.05; constraints.weighty = 0.1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.CENTER; mainPanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS), constraints); JLabel forcer2 = new JLabel(); forcer2.setOpaque(false); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 5; constraints.gridy = 7; constraints.weightx = 10; constraints.weighty = 0.05; constraints.gridwidth = 1; constraints.gridheight = 1; constraints.anchor = GridBagConstraints.LINE_END; mainPanel.add(forcer2, constraints); JPanel filler4 = new JPanel(); filler4.setOpaque(false); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 3; constraints.gridy = 7; constraints.weightx = 0.3; constraints.weighty = 0.01; constraints.gridheight = 1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(filler4, constraints); walletPasswordField = new JPasswordField(24); walletPasswordField.setMinimumSize(new Dimension(200, 20)); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 5; constraints.gridy = 8; constraints.weightx = 0.3; constraints.weighty = 0.1; constraints.gridheight = 1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; 
mainPanel.add(walletPasswordField, constraints); mainPanel.add(MultiBitTitledPanel.createStent(200, 20), constraints); JPanel filler5 = new JPanel(); filler4.setOpaque(false); constraints.fill = GridBagConstraints.BOTH; constraints.gridx = 3; constraints.gridy = 9; constraints.weightx = 0.3; constraints.weighty = 0.01; constraints.gridheight = 1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(filler5, constraints); if (this.bitcoinController.getModel().getActiveWallet() != null) { if (this.bitcoinController.getModel().getActiveWallet().getEncryptionType() == EncryptionType.ENCRYPTED_SCRYPT_AES) { // Need wallet password. walletPasswordField.setEnabled(true); walletPasswordPromptLabel.setEnabled(true); } else { // No wallet password required. walletPasswordField.setEnabled(false); walletPasswordPromptLabel.setEnabled(false); } } JPanel buttonPanel = new JPanel(); buttonPanel.setOpaque(false); //buttonPanel.setBorder(BorderFactory.createLineBorder(Color.RED)); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 3; constraints.gridy = 10; constraints.weightx = 0.8; constraints.weighty = 0.1; constraints.gridwidth = 4; constraints.gridheight = 1; constraints.anchor = GridBagConstraints.LINE_END; mainPanel.add(buttonPanel, constraints); CancelBackToParentAction cancelAction = new CancelBackToParentAction(controller, ImageLoader.createImageIcon(ImageLoader.CROSS_ICON_FILE), sendBitcoinConfirmDialog); cancelButton = new MultiBitButton(cancelAction, controller); buttonPanel.add(cancelButton); sendBitcoinNowAction = new SendBitcoinNowAction(mainFrame, this.bitcoinController, this, walletPasswordField, ImageLoader.createImageIcon(ImageLoader.SEND_BITCOIN_ICON_FILE), sendRequest); sendButton = new MultiBitButton(sendBitcoinNowAction, controller); buttonPanel.add(sendButton); confirmText1 = new MultiBitLabel(""); confirmText1.setText(" "); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 1; 
constraints.gridy = 11; constraints.weightx = 0.8; constraints.weighty = 0.15; constraints.gridwidth = 6; constraints.anchor = GridBagConstraints.LINE_END; mainPanel.add(confirmText1, constraints); JLabel filler3 = new JLabel(); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 7; constraints.gridy = 11; constraints.weightx = 0.05; constraints.weighty = 0.1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(filler3, constraints); confirmText2 = new MultiBitLabel(" "); constraints.fill = GridBagConstraints.NONE; constraints.gridx = 1; constraints.gridy = 12; constraints.weightx = 0.8; constraints.weighty = 0.15; constraints.gridwidth = 6; constraints.anchor = GridBagConstraints.LINE_END; mainPanel.add(confirmText2, constraints); JLabel filler6 = new JLabel(); constraints.fill = GridBagConstraints.HORIZONTAL; constraints.gridx = 7; constraints.gridy = 12; constraints.weightx = 0.05; constraints.weighty = 0.1; constraints.gridwidth = 1; constraints.anchor = GridBagConstraints.LINE_START; mainPanel.add(filler6, constraints); enableSendAccordingToNumberOfConnectedPeersAndWalletBusy(); } private void enableSendAccordingToNumberOfConnectedPeersAndWalletBusy() { boolean enableSend = false; String message = " "; if (this.controller.getModel() != null) { String singleNodeConnection = this.controller.getModel().getUserPreference(BitcoinModel.SINGLE_NODE_CONNECTION); boolean singleNodeConnectionOverride = singleNodeConnection != null && singleNodeConnection.trim().length() > 0; String peers = this.controller.getModel().getUserPreference(BitcoinModel.PEERS); boolean singlePeerOverride = peers != null && peers.split(",").length == 1; if (thisPanel.sendBitcoinNowAction != null) { if (!singleNodeConnectionOverride && !singlePeerOverride && this.bitcoinController.getModel().getNumberOfConnectedPeers() < BitcoinModel.MINIMUM_NUMBER_OF_CONNECTED_PEERS_BEFORE_SEND_IS_ENABLED) { // Disable send button enableSend = false; 
message = controller.getLocaliser().getString("sendBitcoinConfirmView.multibitMustBeOnline"); } else { // Enable send button enableSend = true; message = " "; } if (this.bitcoinController.getModel().getActivePerWalletModelData().isBusy()) { enableSend = false; message = controller.getLocaliser().getString("multiBitSubmitAction.walletIsBusy", new Object[]{controller.getLocaliser().getString(this.bitcoinController.getModel().getActivePerWalletModelData().getBusyTaskKey())}); } thisPanel.sendBitcoinNowAction.setEnabled(enableSend); } } if (sendBitcoinNowAction != null) { sendBitcoinNowAction.setEnabled(enableSend); if (confirmText1 != null) { if (enableSend) { // Only clear the 'multibitMustBeOnline' message. if (controller.getLocaliser().getString("sendBitcoinConfirmView.multibitMustBeOnline").equals(confirmText1.getText())) { confirmText1.setText(message); } } else { confirmText1.setText(message); } } } } public void setMessageText(final String message1) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { confirmText1.setText(message1); }}); invalidate(); validate(); repaint(); } public void setMessageText(final String message1, final String message2) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { confirmText1.setText(message1); confirmText2.setText(" " + message2); }}); invalidate(); validate(); repaint(); } public void clearAfterSend() { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { walletPasswordField.setText(""); walletPasswordField.setVisible(false); explainLabel.setVisible(false); walletPasswordPromptLabel.setVisible(false); }}); } public void showOkButton() { OkBackToParentAction okAction = new OkBackToParentAction(controller, sendBitcoinConfirmDialog); sendButton.setAction(okAction); cancelButton.setVisible(false); } public static void updatePanel() { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { if (thisPanel != null && thisPanel.isVisible()) { final 
BitcoinController bitcoinController = MultiBit.getBitcoinController(); if (bitcoinController != null) { String singleNodeConnection = bitcoinController.getModel().getUserPreference(BitcoinModel.SINGLE_NODE_CONNECTION); boolean singleNodeConnectionOverride = singleNodeConnection != null && singleNodeConnection.trim().length() > 0; String peers = bitcoinController.getModel().getUserPreference(BitcoinModel.PEERS); boolean singlePeerOverride = peers != null && peers.split(",").length == 1; boolean enableSend = false; if (thisPanel.sendBitcoinNowAction != null) { if (!singleNodeConnectionOverride && !singlePeerOverride && bitcoinController.getModel().getNumberOfConnectedPeers() < BitcoinModel.MINIMUM_NUMBER_OF_CONNECTED_PEERS_BEFORE_SEND_IS_ENABLED) { // Disable send button enableSend = false; } else { // Enable send button enableSend = true; } if (bitcoinController.getModel().getActivePerWalletModelData().isBusy()) { enableSend = false; } thisPanel.sendBitcoinNowAction.setEnabled(enableSend); } MultiBitLabel confirmText1 = thisPanel.confirmText1; if (enableSend) { if (confirmText1 != null) { if (MultiBit.getController().getLocaliser() .getString("sendBitcoinConfirmView.multibitMustBeOnline").equals(confirmText1.getText())) { confirmText1.setText(" "); } } } else { if (confirmText1 != null) { confirmText1.setText(MultiBit.getController().getLocaliser() .getString("sendBitcoinConfirmView.multibitMustBeOnline")); } } } thisPanel.invalidate(); thisPanel.validate(); thisPanel.repaint(); } } }); } public static void updatePanelDueToTransactionConfidenceChange(final Sha256Hash transactionWithChangedConfidenceHash, final int numberOfPeersSeenBy) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { if (thisPanel == null || !thisPanel.isVisible() || thisPanel.getSendBitcoinNowAction() == null) { return; } Transaction sentTransaction = thisPanel.getSendBitcoinNowAction().getTransaction(); if (sentTransaction == null || 
!sentTransaction.getHash().equals(transactionWithChangedConfidenceHash)) { return; } MultiBitLabel confirmText2 = thisPanel.getConfirmText2(); if (confirmText2 != null) { confirmText2.setText(thisPanel.getConfidenceToolTip(numberOfPeersSeenBy)); confirmText2.setIcon(thisPanel.getConfidenceIcon(numberOfPeersSeenBy)); } thisPanel.invalidate(); thisPanel.validate(); thisPanel.repaint(); } }); } private String getConfidenceToolTip(int numberOfPeers) { StringBuilder builder = new StringBuilder(""); if (numberOfPeers == 0) { builder.append(MultiBit.getController().getLocaliser().getString("transactionConfidence.seenByUnknownNumberOfPeers")); } else { builder .append(MultiBit.getController().getLocaliser().getString("transactionConfidence.seenBy")) .append(" "); builder.append(numberOfPeers); if (numberOfPeers > 1) builder .append(" ") .append(MultiBit.getController().getLocaliser().getString("transactionConfidence.peers")) .append("."); else builder.append(" ") .append(MultiBit.getController().getLocaliser().getString("transactionConfidence.peer")) .append("."); } return builder.toString(); } private ImageIcon getConfidenceIcon(int numberOfPeers) { // By default return a triangle which indicates the least known. ImageIcon iconToReturn; if (numberOfPeers >= 4) { return progress0Icon; } else { switch (numberOfPeers) { case 0: iconToReturn = shapeTriangleIcon; break; case 1: iconToReturn = shapeSquareIcon; break; case 2: iconToReturn = shapeHeptagonIcon; break; case 3: iconToReturn = shapeHexagonIcon; break; default: iconToReturn = shapeTriangleIcon; } } return iconToReturn; } public MultiBitButton getCancelButton() { return cancelButton; } // Used in testing. 
public SendBitcoinNowAction getSendBitcoinNowAction() { return sendBitcoinNowAction; } public String getMessageText1() { return confirmText1.getText(); } public String getMessageText2() { return confirmText2.getText(); } public void setWalletPassword(CharSequence password) { walletPasswordField.setText(password.toString()); } public boolean isWalletPasswordFieldEnabled() { return walletPasswordField.isEnabled(); } public MultiBitLabel getConfirmText2() { return confirmText2; } @Override public void walletBusyChange(boolean newWalletIsBusy) { enableSendAccordingToNumberOfConnectedPeersAndWalletBusy(); } }
/* * Hibernate, Relational Persistence for Idiomatic Java * * Copyright (c) 2006-2011, Red Hat Inc. or third-party contributors as * indicated by the @author tags or express copyright attribution * statements applied by the authors. All third-party contributions are * distributed under license by Red Hat Inc. * * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU * Lesser General Public License, as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License * for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this distribution; if not, write to: * Free Software Foundation, Inc. * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ package org.hibernate.test.hql; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.Date; import java.util.List; import junit.framework.AssertionFailedError; import org.hibernate.QueryException; import org.hibernate.Session; import org.hibernate.Transaction; import org.hibernate.dialect.H2Dialect; import org.hibernate.dialect.MySQLDialect; import org.hibernate.exception.ConstraintViolationException; import org.hibernate.id.BulkInsertionCapableIdentifierGenerator; import org.hibernate.id.IdentifierGenerator; import org.hibernate.persister.entity.EntityPersister; import org.hibernate.testing.DialectChecks; import org.hibernate.testing.RequiresDialectFeature; import org.hibernate.testing.SkipLog; import org.hibernate.testing.TestForIssue; import org.hibernate.testing.junit4.BaseCoreFunctionalTestCase; 
import org.junit.Test; /** * Tests execution of bulk UPDATE/DELETE statements through the new AST parser. * * @author Steve Ebersole */ public class BulkManipulationTest extends BaseCoreFunctionalTestCase { public String[] getMappings() { return new String[] { "hql/Animal.hbm.xml", "hql/Vehicle.hbm.xml", "hql/KeyManyToOneEntity.hbm.xml", "hql/Versions.hbm.xml", "hql/FooBarCopy.hbm.xml", "legacy/Multi.hbm.xml", "hql/EntityWithCrazyCompositeKey.hbm.xml", "hql/SimpleEntityWithAssociation.hbm.xml", "hql/BooleanLiteralEntity.hbm.xml", "hql/CompositeIdEntity.hbm.xml" }; } protected Class<?>[] getAnnotatedClasses() { return new Class<?>[] { Farm.class, Crop.class }; } @Test public void testDeleteNonExistentEntity() { Session s = openSession(); Transaction t = s.beginTransaction(); try { s.createQuery( "delete NonExistentEntity" ).executeUpdate(); fail( "no exception thrown" ); } catch( QueryException ignore ) { } t.commit(); s.close(); } @Test public void testUpdateNonExistentEntity() { Session s = openSession(); Transaction t = s.beginTransaction(); try { s.createQuery( "update NonExistentEntity e set e.someProp = ?" ).executeUpdate(); fail( "no exception thrown" ); } catch( QueryException e ) { } t.commit(); s.close(); } @Test public void testTempTableGenerationIsolation() throws Throwable{ Session s = openSession(); s.beginTransaction(); Truck truck = new Truck(); truck.setVin( "123t" ); truck.setOwner( "Steve" ); s.save( truck ); // manually flush the session to ensure the insert happens s.flush(); // now issue a bulk delete against Car which should force the temp table to be // created. we need to test to ensure that this does not cause the transaction // to be committed... 
s.createQuery( "delete from Vehicle" ).executeUpdate(); s.getTransaction().rollback(); s.close(); s = openSession(); s.beginTransaction(); List list = s.createQuery( "from Car" ).list(); assertEquals( "temp table gen caused premature commit", 0, list.size() ); s.createQuery( "delete from Car" ).executeUpdate(); s.getTransaction().rollback(); s.close(); } @Test public void testBooleanHandling() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); // currently, we need the three different binds because they are different underlying types... int count = s.createQuery( "update BooleanLiteralEntity set yesNoBoolean = :b1, trueFalseBoolean = :b2, zeroOneBoolean = :b3" ) .setBoolean( "b1", true ) .setBoolean( "b2", true ) .setBoolean( "b3", true ) .executeUpdate(); assertEquals( 1, count ); BooleanLiteralEntity entity = ( BooleanLiteralEntity ) s.createQuery( "from BooleanLiteralEntity" ).uniqueResult(); assertTrue( entity.isYesNoBoolean() ); assertTrue( entity.isTrueFalseBoolean() ); assertTrue( entity.isZeroOneBoolean() ); s.clear(); count = s.createQuery( "update BooleanLiteralEntity set yesNoBoolean = true, trueFalseBoolean = true, zeroOneBoolean = true" ) .executeUpdate(); assertEquals( 1, count ); entity = ( BooleanLiteralEntity ) s.createQuery( "from BooleanLiteralEntity" ).uniqueResult(); assertTrue( entity.isYesNoBoolean() ); assertTrue( entity.isTrueFalseBoolean() ); assertTrue( entity.isZeroOneBoolean() ); t.commit(); s.close(); data.cleanup(); } @Test public void testSimpleInsert() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); s.createQuery( "insert into Pickup (id, vin, owner) select id, vin, owner from Car" ).executeUpdate(); t.commit(); t = s.beginTransaction(); s.createQuery( "delete Vehicle" ).executeUpdate(); t.commit(); s.close(); data.cleanup(); } @Test public void testSimpleNativeSQLInsert() { TestData data = new 
TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); List l = s.createQuery("from Vehicle").list(); assertEquals(l.size(),4); s.createSQLQuery( "insert into Pickup (id, vin, owner) select id, vin, owner from Car" ).executeUpdate(); l = s.createQuery("from Vehicle").list(); assertEquals( l.size(), 5 ); t.commit(); t = s.beginTransaction(); s.createSQLQuery( "delete from Truck" ).executeUpdate(); l = s.createQuery("from Vehicle").list(); assertEquals(l.size(),4); Car c = (Car) s.createQuery( "from Car where owner = 'Kirsten'" ).uniqueResult(); c.setOwner( "NotKirsten" ); assertEquals( 0, s.getNamedQuery( "native-delete-car" ).setString( 0, "Kirsten" ).executeUpdate() ); assertEquals( 1, s.getNamedQuery( "native-delete-car" ).setString( 0, "NotKirsten" ).executeUpdate() ); assertEquals( 0, s.createSQLQuery( "delete from SUV where owner = :owner" ) .setString( "owner", "NotThere" ) .executeUpdate() ); assertEquals( 1, s.createSQLQuery( "delete from SUV where owner = :owner" ) .setString( "owner", "Joe" ) .executeUpdate() ); s.createSQLQuery( "delete from Pickup" ).executeUpdate(); l = s.createQuery("from Vehicle").list(); assertEquals(l.size(),0); t.commit(); s.close(); data.cleanup(); } @Test public void testInsertWithManyToOne() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); s.createQuery( "insert into Animal (description, bodyWeight, mother) select description, bodyWeight, mother from Human" ).executeUpdate(); t.commit(); t = s.beginTransaction(); t.commit(); s.close(); data.cleanup(); } @Test public void testInsertWithMismatchedTypes() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); try { s.createQuery( "insert into Pickup (owner, vin, id) select id, vin, owner from Car" ).executeUpdate(); fail( "mismatched types did not error" ); } catch( QueryException e ) { // expected result } t.commit(); 
t = s.beginTransaction(); s.createQuery( "delete Vehicle" ).executeUpdate(); t.commit(); s.close(); data.cleanup(); } @Test public void testInsertIntoSuperclassPropertiesFails() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); try { s.createQuery( "insert into Human (id, bodyWeight) select id, bodyWeight from Lizard" ).executeUpdate(); fail( "superclass prop insertion did not error" ); } catch( QueryException e ) { // expected result } t.commit(); t = s.beginTransaction(); s.createQuery( "delete Animal where mother is not null" ).executeUpdate(); s.createQuery( "delete Animal where father is not null" ).executeUpdate(); s.createQuery( "delete Animal" ).executeUpdate(); t.commit(); s.close(); data.cleanup(); } @Test public void testInsertAcrossMappedJoinFails() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); try { s.createQuery( "insert into Joiner (name, joinedName) select vin, owner from Car" ).executeUpdate(); fail( "mapped-join insertion did not error" ); } catch( QueryException e ) { // expected result } t.commit(); t = s.beginTransaction(); s.createQuery( "delete Joiner" ).executeUpdate(); s.createQuery( "delete Vehicle" ).executeUpdate(); t.commit(); s.close(); data.cleanup(); } protected boolean supportsBulkInsertIdGeneration(Class entityClass) { EntityPersister persister = sessionFactory().getEntityPersister( entityClass.getName() ); IdentifierGenerator generator = persister.getIdentifierGenerator(); return BulkInsertionCapableIdentifierGenerator.class.isInstance( generator ) && BulkInsertionCapableIdentifierGenerator.class.cast( generator ).supportsBulkInsertionIdentifierGeneration(); } @Test public void testInsertWithGeneratedId() { // Make sure the env supports bulk inserts with generated ids... 
if ( !supportsBulkInsertIdGeneration( PettingZoo.class ) ) { SkipLog.reportSkip( "bulk id generation not supported", "test bulk inserts with generated id and generated timestamp" ); return; } // create a Zoo Zoo zoo = new Zoo(); zoo.setName( "zoo" ); Session s = openSession(); Transaction t = s.beginTransaction(); s.save( zoo ); t.commit(); s.close(); s = openSession(); t = s.beginTransaction(); int count = s.createQuery( "insert into PettingZoo (name) select name from Zoo" ).executeUpdate(); t.commit(); s.close(); assertEquals( "unexpected insertion count", 1, count ); s = openSession(); t = s.beginTransaction(); PettingZoo pz = ( PettingZoo ) s.createQuery( "from PettingZoo" ).uniqueResult(); t.commit(); s.close(); assertEquals( zoo.getName(), pz.getName() ); assertTrue( !zoo.getId().equals( pz.getId() ) ); s = openSession(); t = s.beginTransaction(); s.createQuery( "delete Zoo" ).executeUpdate(); t.commit(); s.close(); } @SuppressWarnings( {"UnnecessaryUnboxing"}) @Test public void testInsertWithGeneratedVersionAndId() { // Make sure the env supports bulk inserts with generated ids... 
if ( !supportsBulkInsertIdGeneration( IntegerVersioned.class ) ) { SkipLog.reportSkip( "bulk id generation not supported", "test bulk inserts with generated id and generated timestamp" ); return; } Session s = openSession(); Transaction t = s.beginTransaction(); IntegerVersioned entity = new IntegerVersioned( "int-vers" ); s.save( entity ); s.createQuery( "select id, name, version from IntegerVersioned" ).list(); t.commit(); s.close(); Long initialId = entity.getId(); int initialVersion = entity.getVersion(); s = openSession(); t = s.beginTransaction(); int count = s.createQuery( "insert into IntegerVersioned ( name ) select name from IntegerVersioned" ).executeUpdate(); t.commit(); s.close(); assertEquals( "unexpected insertion count", 1, count ); s = openSession(); t = s.beginTransaction(); IntegerVersioned created = ( IntegerVersioned ) s.createQuery( "from IntegerVersioned where id <> :initialId" ) .setLong( "initialId", initialId.longValue() ) .uniqueResult(); t.commit(); s.close(); assertEquals( "version was not seeded", initialVersion, created.getVersion() ); s = openSession(); t = s.beginTransaction(); s.createQuery( "delete IntegerVersioned" ).executeUpdate(); t.commit(); s.close(); } @Test @SuppressWarnings( {"UnnecessaryUnboxing"}) @RequiresDialectFeature( value = DialectChecks.SupportsParametersInInsertSelectCheck.class, comment = "dialect does not support parameter in INSERT ... SELECT" ) public void testInsertWithGeneratedTimestampVersion() { // Make sure the env supports bulk inserts with generated ids... 
if ( !supportsBulkInsertIdGeneration( TimestampVersioned.class ) ) { SkipLog.reportSkip( "bulk id generation not supported", "test bulk inserts with generated id and generated timestamp" ); return; } Session s = openSession(); Transaction t = s.beginTransaction(); TimestampVersioned entity = new TimestampVersioned( "int-vers" ); s.save( entity ); s.createQuery( "select id, name, version from TimestampVersioned" ).list(); t.commit(); s.close(); Long initialId = entity.getId(); //Date initialVersion = entity.getVersion(); s = openSession(); t = s.beginTransaction(); int count = s.createQuery( "insert into TimestampVersioned ( name ) select name from TimestampVersioned" ).executeUpdate(); t.commit(); s.close(); assertEquals( "unexpected insertion count", 1, count ); s = openSession(); t = s.beginTransaction(); TimestampVersioned created = ( TimestampVersioned ) s.createQuery( "from TimestampVersioned where id <> :initialId" ) .setLong( "initialId", initialId.longValue() ) .uniqueResult(); t.commit(); s.close(); assertNotNull( created.getVersion() ); //assertEquals( "version was not seeded", initialVersion, created.getVersion() ); s = openSession(); t = s.beginTransaction(); s.createQuery( "delete TimestampVersioned" ).executeUpdate(); t.commit(); s.close(); } @Test public void testInsertWithAssignedCompositeId() { // this just checks that the query parser detects that we are explicitly inserting a composite id Session s = openSession(); s.beginTransaction(); // intentionally reversing the order of the composite id properties to make sure that is supported too s.createQuery( "insert into CompositeIdEntity (key2, someProperty, key1) select a.key2, 'COPY', a.key1 from CompositeIdEntity a" ).executeUpdate(); s.createQuery( "delete from CompositeIdEntity" ).executeUpdate(); s.getTransaction().commit(); s.close(); } @Test public void testInsertWithSelectListUsingJoins() { // this is just checking parsing and syntax... 
Session s = openSession(); s.beginTransaction(); s.createQuery( "insert into Animal (description, bodyWeight) select h.description, h.bodyWeight from Human h where h.mother.mother is not null" ).executeUpdate(); s.createQuery( "insert into Animal (description, bodyWeight) select h.description, h.bodyWeight from Human h join h.mother m where m.mother is not null" ).executeUpdate(); s.createQuery( "delete from Animal" ).executeUpdate(); s.getTransaction().commit(); s.close(); } @Test public void testIncorrectSyntax() { Session s = openSession(); Transaction t = s.beginTransaction(); try { s.createQuery( "update Human set Human.description = 'xyz' where Human.id = 1 and Human.description is null" ); fail( "expected failure" ); } catch( QueryException expected ) { // ignore : expected behavior } t.commit(); s.close(); } @SuppressWarnings( {"unchecked"}) @Test public void testUpdateWithWhereExistsSubquery() { // multi-table ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Session s = openSession(); Transaction t = s.beginTransaction(); Human joe = new Human(); joe.setName( new Name( "Joe", 'Q', "Public" ) ); s.save( joe ); Human doll = new Human(); doll.setName( new Name( "Kyu", 'P', "Doll" ) ); doll.setFriends( new ArrayList() ); doll.getFriends().add( joe ); s.save( doll ); t.commit(); s.close(); s = openSession(); t = s.beginTransaction(); String updateQryString = "update Human h " + "set h.description = 'updated' " + "where exists (" + " select f.id " + " from h.friends f " + " where f.name.last = 'Public' " + ")"; int count = s.createQuery( updateQryString ).executeUpdate(); assertEquals( 1, count ); s.delete( doll ); s.delete( joe ); t.commit(); s.close(); // single-table (one-to-many & many-to-many) ~~~~~~~~~~~~~~~~~~~~~~~~~~ s = openSession(); t = s.beginTransaction(); SimpleEntityWithAssociation entity = new SimpleEntityWithAssociation(); SimpleEntityWithAssociation other = new SimpleEntityWithAssociation(); entity.setName( "main" ); other.setName( 
"many-to-many-association" ); entity.getManyToManyAssociatedEntities().add( other ); entity.addAssociation( "one-to-many-association" ); s.save( entity ); t.commit(); s.close(); s = openSession(); t = s.beginTransaction(); // one-to-many test updateQryString = "update SimpleEntityWithAssociation e " + "set e.name = 'updated' " + "where exists (" + " select a.id " + " from e.associatedEntities a " + " where a.name = 'one-to-many-association' " + ")"; count = s.createQuery( updateQryString ).executeUpdate(); assertEquals( 1, count ); // many-to-many test if ( getDialect().supportsSubqueryOnMutatingTable() ) { updateQryString = "update SimpleEntityWithAssociation e " + "set e.name = 'updated' " + "where exists (" + " select a.id " + " from e.manyToManyAssociatedEntities a " + " where a.name = 'many-to-many-association' " + ")"; count = s.createQuery( updateQryString ).executeUpdate(); assertEquals( 1, count ); } s.delete( entity.getManyToManyAssociatedEntities().iterator().next() ); s.delete( entity ); t.commit(); s.close(); } @Test public void testIncrementCounterVersion() { Session s = openSession(); Transaction t = s.beginTransaction(); IntegerVersioned entity = new IntegerVersioned( "int-vers" ); s.save( entity ); t.commit(); s.close(); int initialVersion = entity.getVersion(); s = openSession(); t = s.beginTransaction(); int count = s.createQuery( "update versioned IntegerVersioned set name = name" ).executeUpdate(); assertEquals( "incorrect exec count", 1, count ); t.commit(); t = s.beginTransaction(); entity = ( IntegerVersioned ) s.load( IntegerVersioned.class, entity.getId() ); assertEquals( "version not incremented", initialVersion + 1, entity.getVersion() ); s.delete( entity ); t.commit(); s.close(); } @Test public void testIncrementTimestampVersion() { Session s = openSession(); Transaction t = s.beginTransaction(); TimestampVersioned entity = new TimestampVersioned( "ts-vers" ); s.save( entity ); t.commit(); s.close(); Date initialVersion = 
entity.getVersion(); synchronized (this) { try { wait(1500); } catch (InterruptedException ie) {} } s = openSession(); t = s.beginTransaction(); int count = s.createQuery( "update versioned TimestampVersioned set name = name" ).executeUpdate(); assertEquals( "incorrect exec count", 1, count ); t.commit(); t = s.beginTransaction(); entity = ( TimestampVersioned ) s.load( TimestampVersioned.class, entity.getId() ); assertTrue( "version not incremented", entity.getVersion().after( initialVersion ) ); s.delete( entity ); t.commit(); s.close(); } @Test @SuppressWarnings( {"UnnecessaryUnboxing"}) public void testUpdateOnComponent() { Session s = openSession(); Transaction t = s.beginTransaction(); Human human = new Human(); human.setName( new Name( "Stevee", 'X', "Ebersole" ) ); s.save( human ); s.flush(); t.commit(); String correctName = "Steve"; t = s.beginTransaction(); int count = s.createQuery( "update Human set name.first = :correction where id = :id" ) .setString( "correction", correctName ) .setLong( "id", human.getId().longValue() ) .executeUpdate(); assertEquals( "Incorrect update count", 1, count ); t.commit(); t = s.beginTransaction(); s.refresh( human ); assertEquals( "Update did not execute properly", correctName, human.getName().getFirst() ); s.createQuery( "delete Human" ).executeUpdate(); t.commit(); s.close(); } @Test public void testUpdateOnManyToOne() { Session s = openSession(); Transaction t = s.beginTransaction(); s.createQuery( "update Animal a set a.mother = null where a.id = 2" ).executeUpdate(); if ( ! 
( getDialect() instanceof MySQLDialect ) ) { // MySQL does not support (even un-correlated) subqueries against the update-mutating table s.createQuery( "update Animal a set a.mother = (from Animal where id = 1) where a.id = 2" ).executeUpdate(); } t.commit(); s.close(); } @Test public void testUpdateOnImplicitJoinFails() { Session s = openSession(); Transaction t = s.beginTransaction(); Human human = new Human(); human.setName( new Name( "Steve", 'E', null ) ); Human mother = new Human(); mother.setName( new Name( "Jane", 'E', null ) ); human.setMother( mother ); s.save( human ); s.save( mother ); s.flush(); t.commit(); t = s.beginTransaction(); try { s.createQuery( "update Human set mother.name.initial = :initial" ).setString( "initial", "F" ).executeUpdate(); fail( "update allowed across implicit join" ); } catch( QueryException e ) { } s.createQuery( "delete Human where mother is not null" ).executeUpdate(); s.createQuery( "delete Human" ).executeUpdate(); t.commit(); s.close(); } @Test @SuppressWarnings( {"UnnecessaryUnboxing"}) public void testUpdateOnDiscriminatorSubclass() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "update PettingZoo set name = name" ).executeUpdate(); assertEquals( "Incorrect discrim subclass update count", 1, count ); t.rollback(); t = s.beginTransaction(); count = s.createQuery( "update PettingZoo pz set pz.name = pz.name where pz.id = :id" ) .setLong( "id", data.pettingZoo.getId().longValue() ) .executeUpdate(); assertEquals( "Incorrect discrim subclass update count", 1, count ); t.rollback(); t = s.beginTransaction(); count = s.createQuery( "update Zoo as z set z.name = z.name" ).executeUpdate(); assertEquals( "Incorrect discrim subclass update count", 2, count ); t.rollback(); t = s.beginTransaction(); // TODO : not so sure this should be allowed. 
Seems to me that if they specify an alias, // property-refs should be required to be qualified. count = s.createQuery( "update Zoo as z set name = name where id = :id" ) .setLong( "id", data.zoo.getId().longValue() ) .executeUpdate(); assertEquals( "Incorrect discrim subclass update count", 1, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testUpdateOnAnimal() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "update Animal set description = description where description = :desc" ) .setString( "desc", data.frog.getDescription() ) .executeUpdate(); assertEquals( "Incorrect entity-updated count", 1, count ); count = s.createQuery( "update Animal set description = :newDesc where description = :desc" ) .setString( "desc", data.polliwog.getDescription() ) .setString( "newDesc", "Tadpole" ) .executeUpdate(); assertEquals( "Incorrect entity-updated count", 1, count ); Animal tadpole = ( Animal ) s.load( Animal.class, data.polliwog.getId() ); assertEquals( "Update did not take effect", "Tadpole", tadpole.getDescription() ); count = s.createQuery( "update Animal set bodyWeight = bodyWeight + :w1 + :w2" ) .setDouble( "w1", 1 ) .setDouble( "w2", 2 ) .executeUpdate(); assertEquals( "incorrect count on 'complex' update assignment", count, 6 ); if ( ! 
( getDialect() instanceof MySQLDialect ) ) { // MySQL does not support (even un-correlated) subqueries against the update-mutating table s.createQuery( "update Animal set bodyWeight = ( select max(bodyWeight) from Animal )" ) .executeUpdate(); } t.commit(); s.close(); data.cleanup(); } @Test public void testUpdateOnMammal() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "update Mammal set description = description" ).executeUpdate(); assertEquals( "incorrect update count against 'middle' of joined-subclass hierarchy", 2, count ); count = s.createQuery( "update Mammal set bodyWeight = 25" ).executeUpdate(); assertEquals( "incorrect update count against 'middle' of joined-subclass hierarchy", 2, count ); if ( ! ( getDialect() instanceof MySQLDialect ) ) { // MySQL does not support (even un-correlated) subqueries against the update-mutating table count = s.createQuery( "update Mammal set bodyWeight = ( select max(bodyWeight) from Animal )" ).executeUpdate(); assertEquals( "incorrect update count against 'middle' of joined-subclass hierarchy", 2, count ); } t.commit(); s.close(); data.cleanup(); } @Test public void testUpdateSetNullUnionSubclass() { TestData data = new TestData(); data.prepare(); // These should reach out into *all* subclass tables... 
Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "update Vehicle set owner = 'Steve'" ).executeUpdate(); assertEquals( "incorrect restricted update count", 4, count ); count = s.createQuery( "update Vehicle set owner = null where owner = 'Steve'" ).executeUpdate(); assertEquals( "incorrect restricted update count", 4, count ); try { count = s.createQuery( "delete Vehicle where owner is null" ).executeUpdate(); assertEquals( "incorrect restricted delete count", 4, count ); } catch ( AssertionFailedError afe ) { if ( H2Dialect.class.isInstance( getDialect() ) ) { // http://groups.google.com/group/h2-database/t/5548ff9fd3abdb7 // this is fixed in H2 1.2.140 count = s.createQuery( "delete Vehicle" ).executeUpdate(); assertEquals( "incorrect count", 4, count ); } else { throw afe; } } t.commit(); s.close(); data.cleanup(); } @Test public void testUpdateSetNullOnDiscriminatorSubclass() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "update PettingZoo set address.city = null" ).executeUpdate(); assertEquals( "Incorrect discrim subclass delete count", 1, count ); count = s.createQuery( "delete Zoo where address.city is null" ).executeUpdate(); assertEquals( "Incorrect discrim subclass delete count", 1, count ); count = s.createQuery( "update Zoo set address.city = null" ).executeUpdate(); assertEquals( "Incorrect discrim subclass delete count", 1, count ); count = s.createQuery( "delete Zoo where address.city is null" ).executeUpdate(); assertEquals( "Incorrect discrim subclass delete count", 1, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testUpdateSetNullOnJoinedSubclass() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "update Mammal set bodyWeight = null" ).executeUpdate(); assertEquals( "Incorrect deletion count on 
joined subclass", 2, count ); count = s.createQuery( "delete Animal where bodyWeight = null" ).executeUpdate(); assertEquals( "Incorrect deletion count on joined subclass", 2, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testDeleteWithSubquery() { // setup the test data... Session s = openSession(); s.beginTransaction(); SimpleEntityWithAssociation owner = new SimpleEntityWithAssociation( "myEntity-1" ); owner.addAssociation( "assoc-1" ); owner.addAssociation( "assoc-2" ); owner.addAssociation( "assoc-3" ); s.save( owner ); SimpleEntityWithAssociation owner2 = new SimpleEntityWithAssociation( "myEntity-2" ); owner2.addAssociation( "assoc-1" ); owner2.addAssociation( "assoc-2" ); owner2.addAssociation( "assoc-3" ); owner2.addAssociation( "assoc-4" ); s.save( owner2 ); SimpleEntityWithAssociation owner3 = new SimpleEntityWithAssociation( "myEntity-3" ); s.save( owner3 ); s.getTransaction().commit(); s.close(); // now try the bulk delete s = openSession(); s.beginTransaction(); int count = s.createQuery( "delete SimpleEntityWithAssociation e where size( e.associatedEntities ) = 0 and e.name like '%'" ).executeUpdate(); assertEquals( "incorrect delete count", 1, count ); s.getTransaction().commit(); s.close(); // finally, clean up s = openSession(); s.beginTransaction(); s.createQuery( "delete SimpleAssociatedEntity" ).executeUpdate(); s.createQuery( "delete SimpleEntityWithAssociation" ).executeUpdate(); s.getTransaction().commit(); s.close(); } @Test @SuppressWarnings( {"UnnecessaryUnboxing"}) @RequiresDialectFeature( value = DialectChecks.HasSelfReferentialForeignKeyBugCheck.class, comment = "self referential FK bug" ) public void testSimpleDeleteOnAnimal() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "delete from Animal as a where a.id = :id" ) .setLong( "id", data.polliwog.getId().longValue() ) .executeUpdate(); assertEquals( "Incorrect delete 
count", 1, count ); count = s.createQuery( "delete Animal where id = :id" ) .setLong( "id", data.catepillar.getId().longValue() ) .executeUpdate(); assertEquals( "incorrect delete count", 1, count ); if ( getDialect().supportsSubqueryOnMutatingTable() ) { count = s.createQuery( "delete from User u where u not in (select u from User u)" ).executeUpdate(); assertEquals( 0, count ); } count = s.createQuery( "delete Animal a" ).executeUpdate(); assertEquals( "Incorrect delete count", 4, count ); List list = s.createQuery( "select a from Animal as a" ).list(); assertTrue( "table not empty", list.isEmpty() ); t.commit(); s.close(); data.cleanup(); } @Test public void testDeleteOnDiscriminatorSubclass() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "delete PettingZoo" ).executeUpdate(); assertEquals( "Incorrect discrim subclass delete count", 1, count ); count = s.createQuery( "delete Zoo" ).executeUpdate(); assertEquals( "Incorrect discrim subclass delete count", 1, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testDeleteOnJoinedSubclass() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "delete Mammal where bodyWeight > 150" ).executeUpdate(); assertEquals( "Incorrect deletion count on joined subclass", 1, count ); count = s.createQuery( "delete Mammal" ).executeUpdate(); assertEquals( "Incorrect deletion count on joined subclass", 1, count ); count = s.createQuery( "delete SubMulti" ).executeUpdate(); assertEquals( "Incorrect deletion count on joined subclass", 0, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testDeleteOnMappedJoin() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "delete Joiner where joinedName = :joinedName" ).setString( 
"joinedName", "joined-name" ).executeUpdate(); assertEquals( "Incorrect deletion count on joined subclass", 1, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testDeleteUnionSubclassAbstractRoot() { TestData data = new TestData(); data.prepare(); // These should reach out into *all* subclass tables... Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "delete Vehicle where owner = :owner" ).setString( "owner", "Steve" ).executeUpdate(); assertEquals( "incorrect restricted update count", 1, count ); count = s.createQuery( "delete Vehicle" ).executeUpdate(); assertEquals( "incorrect update count", 3, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testDeleteUnionSubclassConcreteSubclass() { TestData data = new TestData(); data.prepare(); // These should only affect the given table Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "delete Truck where owner = :owner" ).setString( "owner", "Steve" ).executeUpdate(); assertEquals( "incorrect restricted update count", 1, count ); count = s.createQuery( "delete Truck" ).executeUpdate(); assertEquals( "incorrect update count", 2, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testDeleteUnionSubclassLeafSubclass() { TestData data = new TestData(); data.prepare(); // These should only affect the given table Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "delete Car where owner = :owner" ).setString( "owner", "Kirsten" ).executeUpdate(); assertEquals( "incorrect restricted update count", 1, count ); count = s.createQuery( "delete Car" ).executeUpdate(); assertEquals( "incorrect update count", 0, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testDeleteWithMetadataWhereFragments() throws Throwable { Session s = openSession(); Transaction t = s.beginTransaction(); // Note: we are just checking the syntax here... 
s.createQuery("delete from Bar").executeUpdate(); s.createQuery("delete from Bar where barString = 's'").executeUpdate(); t.commit(); s.close(); } @Test public void testDeleteRestrictedOnManyToOne() { TestData data = new TestData(); data.prepare(); Session s = openSession(); Transaction t = s.beginTransaction(); int count = s.createQuery( "delete Animal where mother = :mother" ) .setEntity( "mother", data.butterfly ) .executeUpdate(); assertEquals( 1, count ); t.commit(); s.close(); data.cleanup(); } @Test public void testDeleteSyntaxWithCompositeId() { Session s = openSession(); Transaction t = s.beginTransaction(); s.createQuery( "delete EntityWithCrazyCompositeKey where id.id = 1 and id.otherId = 2" ).executeUpdate(); s.createQuery( "delete from EntityWithCrazyCompositeKey where id.id = 1 and id.otherId = 2" ).executeUpdate(); s.createQuery( "delete from EntityWithCrazyCompositeKey e where e.id.id = 1 and e.id.otherId = 2" ).executeUpdate(); t.commit(); s.close(); } @Test @TestForIssue( jiraKey = "HHH-8476" ) public void testManyToManyBulkDelete() { Session s = openSession(); Transaction t = s.beginTransaction(); Farm farm1 = new Farm(); farm1.setName( "farm1" ); Crop crop = new Crop(); crop.setName( "crop1" ); farm1.setCrops( new ArrayList() ); farm1.getCrops().add( crop ); s.save( farm1 ); Farm farm2 = new Farm(); farm2.setName( "farm2" ); farm2.setCrops( new ArrayList() ); farm2.getCrops().add( crop ); s.save( farm2 ); s.flush(); try { s.createQuery( "delete from Farm f where f.name='farm1'" ).executeUpdate(); assertEquals( s.createQuery( "from Farm" ).list().size(), 1 ); s.createQuery( "delete from Farm" ).executeUpdate(); assertEquals( s.createQuery( "from Farm" ).list().size(), 0 ); } catch (ConstraintViolationException cve) { fail("The join table was not cleared prior to the bulk delete."); } finally { t.rollback(); s.close(); } } @Test @TestForIssue( jiraKey = "HHH-1917" ) public void testManyToManyBulkDeleteMultiTable() { Session s = openSession(); 
Transaction t = s.beginTransaction(); Human friend = new Human(); friend.setName( new Name( "Bob", 'B', "Bobbert" ) ); s.save( friend ); Human brett = new Human(); brett.setName( new Name( "Brett", 'E', "Meyer" ) ); brett.setFriends( new ArrayList() ); brett.getFriends().add( friend ); s.save( brett ); s.flush(); try { // multitable (joined subclass) s.createQuery( "delete from Human" ).executeUpdate(); assertEquals( s.createQuery( "from Human" ).list().size(), 0 ); } catch (ConstraintViolationException cve) { fail("The join table was not cleared prior to the bulk delete."); } finally { t.rollback(); s.close(); } } private class TestData { private Animal polliwog; private Animal catepillar; private Animal frog; private Animal butterfly; private Zoo zoo; private Zoo pettingZoo; private void prepare() { Session s = openSession(); Transaction txn = s.beginTransaction(); polliwog = new Animal(); polliwog.setBodyWeight( 12 ); polliwog.setDescription( "Polliwog" ); catepillar = new Animal(); catepillar.setBodyWeight( 10 ); catepillar.setDescription( "Catepillar" ); frog = new Animal(); frog.setBodyWeight( 34 ); frog.setDescription( "Frog" ); polliwog.setFather( frog ); frog.addOffspring( polliwog ); butterfly = new Animal(); butterfly.setBodyWeight( 9 ); butterfly.setDescription( "Butterfly" ); catepillar.setMother( butterfly ); butterfly.addOffspring( catepillar ); s.save( frog ); s.save( polliwog ); s.save( butterfly ); s.save( catepillar ); Dog dog = new Dog(); dog.setBodyWeight( 200 ); dog.setDescription( "dog" ); s.save( dog ); Cat cat = new Cat(); cat.setBodyWeight( 100 ); cat.setDescription( "cat" ); s.save( cat ); zoo = new Zoo(); zoo.setName( "Zoo" ); Address add = new Address(); add.setCity("MEL"); add.setCountry("AU"); add.setStreet("Main st"); add.setPostalCode("3000"); zoo.setAddress(add); pettingZoo = new PettingZoo(); pettingZoo.setName( "Petting Zoo" ); Address addr = new Address(); addr.setCity("Sydney"); addr.setCountry("AU"); addr.setStreet("High st"); 
addr.setPostalCode("2000"); pettingZoo.setAddress(addr); s.save( zoo ); s.save( pettingZoo ); Joiner joiner = new Joiner(); joiner.setJoinedName( "joined-name" ); joiner.setName( "name" ); s.save( joiner ); Car car = new Car(); car.setVin( "123c" ); car.setOwner( "Kirsten" ); s.save( car ); Truck truck = new Truck(); truck.setVin( "123t" ); truck.setOwner( "Steve" ); s.save( truck ); SUV suv = new SUV(); suv.setVin( "123s" ); suv.setOwner( "Joe" ); s.save( suv ); Pickup pickup = new Pickup(); pickup.setVin( "123p" ); pickup.setOwner( "Cecelia" ); s.save( pickup ); BooleanLiteralEntity bool = new BooleanLiteralEntity(); s.save( bool ); txn.commit(); s.close(); } private void cleanup() { Session s = openSession(); Transaction txn = s.beginTransaction(); // workaround awesome HSQLDB "feature" s.createQuery( "delete from Animal where mother is not null or father is not null" ).executeUpdate(); s.createQuery( "delete from Animal" ).executeUpdate(); s.createQuery( "delete from Zoo" ).executeUpdate(); s.createQuery( "delete from Joiner" ).executeUpdate(); s.createQuery( "delete from Vehicle" ).executeUpdate(); s.createQuery( "delete from BooleanLiteralEntity" ).executeUpdate(); txn.commit(); s.close(); } } }
/* * Copyright 2012-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.codehaus.griffon.ast; import griffon.plugins.hbase.DefaultHBaseProvider; import griffon.plugins.hbase.HBaseAware; import griffon.plugins.hbase.HBaseContributionHandler; import griffon.plugins.hbase.HBaseProvider; import lombok.core.handlers.HBaseAwareConstants; import org.codehaus.groovy.ast.*; import org.codehaus.groovy.ast.expr.ConstantExpression; import org.codehaus.groovy.ast.expr.Expression; import org.codehaus.groovy.control.CompilePhase; import org.codehaus.groovy.control.SourceUnit; import org.codehaus.groovy.control.messages.SimpleMessage; import org.codehaus.groovy.transform.GroovyASTTransformation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.codehaus.griffon.ast.GriffonASTUtils.*; /** * Handles generation of code for the {@code @HBaseAware} annotation. 
* <p/> * * @author Andres Almiray */ @GroovyASTTransformation(phase = CompilePhase.CANONICALIZATION) public class HBaseAwareASTTransformation extends AbstractASTTransformation implements HBaseAwareConstants { private static final Logger LOG = LoggerFactory.getLogger(HBaseAwareASTTransformation.class); private static final ClassNode HBASE_CONTRIBUTION_HANDLER_CNODE = makeClassSafe(HBaseContributionHandler.class); private static final ClassNode HBASE_AWARE_CNODE = makeClassSafe(HBaseAware.class); private static final ClassNode HBASE_PROVIDER_CNODE = makeClassSafe(HBaseProvider.class); private static final ClassNode DEFAULT_HBASE_PROVIDER_CNODE = makeClassSafe(DefaultHBaseProvider.class); private static final String[] DELEGATING_METHODS = new String[] { METHOD_WITH_HBASE, METHOD_WITH_HTABLE }; static { Arrays.sort(DELEGATING_METHODS); } /** * Convenience method to see if an annotated node is {@code @HBaseAware}. * * @param node the node to check * @return true if the node is an event publisher */ public static boolean hasHBaseAwareAnnotation(AnnotatedNode node) { for (AnnotationNode annotation : node.getAnnotations()) { if (HBASE_AWARE_CNODE.equals(annotation.getClassNode())) { return true; } } return false; } /** * Handles the bulk of the processing, mostly delegating to other methods. 
*
 * @param nodes  the AST nodes handed to the transformation by the Groovy
 *               compiler; {@code nodes[1]} is cast to {@link ClassNode} below
 * @param source the source unit for the nodes
 */
public void visit(ASTNode[] nodes, SourceUnit source) {
    checkNodesForAnnotationAndType(nodes[0], nodes[1]);
    addHBaseContributionIfNeeded(source, (ClassNode) nodes[1]);
}

/**
 * Injects the {@code HBaseContributionHandler} contract into {@code classNode}
 * unless the class (or one of its super classes) already provides it.
 *
 * @param source    the source unit being compiled, used for error reporting
 * @param classNode the class to enhance
 */
public static void addHBaseContributionIfNeeded(SourceUnit source, ClassNode classNode) {
    if (needsHBaseContribution(classNode, source)) {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Injecting " + HBaseContributionHandler.class.getName() + " into " + classNode.getName());
        }
        apply(classNode);
    }
}

/**
 * Decides whether {@code declaringClass} still needs the HBase contribution
 * methods injected.
 *
 * @param declaringClass the class under inspection (its super classes are
 *                       walked as well)
 * @param sourceUnit     used to report a partial hand-written implementation
 * @return {@code true} when none of the contribution methods exist yet;
 *         {@code false} when all of them exist, or when only some exist (in
 *         which case a compile error is reported and nothing is injected)
 */
protected static boolean needsHBaseContribution(ClassNode declaringClass, SourceUnit sourceUnit) {
    boolean found1 = false, found2 = false, found3 = false, found4 = false, found5 = false, found6 = false;
    ClassNode consideredClass = declaringClass;
    while (consideredClass != null) {
        for (MethodNode method : consideredClass.getMethods()) {
            // just check length, MOP will match it up
            // BUG FIX: accumulate matches with |= instead of plain assignment.
            // The original overwrote all six flags on every iteration, so they
            // only ever reflected the *last* method examined: the in-loop
            // "all six found" short-circuit could never trigger (one method
            // cannot match six different names), and the post-loop checks
            // compared against stale values, causing duplicate injection or a
            // spurious "some but not all" error.
            found1 |= method.getName().equals(METHOD_WITH_HBASE) && method.getParameters().length == 1;
            found2 |= method.getName().equals(METHOD_WITH_HBASE) && method.getParameters().length == 2;
            found3 |= method.getName().equals(METHOD_WITH_HTABLE) && method.getParameters().length == 2;
            found4 |= method.getName().equals(METHOD_WITH_HTABLE) && method.getParameters().length == 3;
            found5 |= method.getName().equals(METHOD_SET_HBASE_PROVIDER) && method.getParameters().length == 1;
            found6 |= method.getName().equals(METHOD_GET_HBASE_PROVIDER) && method.getParameters().length == 0;
            if (found1 && found2 && found3 && found4 && found5 && found6) {
                // every contribution method already exists; nothing to inject
                return false;
            }
        }
        consideredClass = consideredClass.getSuperClass();
    }
    if (found1 || found2 || found3 || found4 || found5 || found6) {
        // a partial hand-written implementation is ambiguous -- refuse to inject
        sourceUnit.getErrorCollector().addErrorAndContinue(
            new SimpleMessage("@HBaseAware cannot be processed on "
                + declaringClass.getName()
                + " because some but not all of methods from "
                + HBaseContributionHandler.class.getName()
                + " were declared in the current class or super classes.",
                sourceUnit)
        );
        return false;
    }
    return true;
}

/**
 * Performs the actual AST surgery: adds the handler interface, a synthetic
 * provider field with accessor/mutator, and one delegating method per entry
 * in {@code DELEGATING_METHODS}.
 *
 * @param declaringClass the class to enhance
 */
public static void apply(ClassNode declaringClass) {
    injectInterface(declaringClass, HBASE_CONTRIBUTION_HANDLER_CNODE);

    // add field:
    // private HBaseProvider this$hbaseProvider = DefaultHBaseProvider.instance
    FieldNode providerField = declaringClass.addField(
        HBASE_PROVIDER_FIELD_NAME,
        ACC_PRIVATE | ACC_SYNTHETIC,
        HBASE_PROVIDER_CNODE,
        defaultHBaseProviderInstance());

    // add method:
    // HBaseProvider getHBaseProvider() {
    //     return this$hbaseProvider
    // }
    injectMethod(declaringClass, new MethodNode(
        METHOD_GET_HBASE_PROVIDER,
        ACC_PUBLIC,
        HBASE_PROVIDER_CNODE,
        Parameter.EMPTY_ARRAY,
        NO_EXCEPTIONS,
        returns(field(providerField))
    ));

    // add method:
    // void setHBaseProvider(HBaseProvider provider) {
    //     this$hbaseProvider = provider ?: DefaultHBaseProvider.instance
    // }
    injectMethod(declaringClass, new MethodNode(
        METHOD_SET_HBASE_PROVIDER,
        ACC_PUBLIC,
        ClassHelper.VOID_TYPE,
        params(param(HBASE_PROVIDER_CNODE, PROVIDER)),
        NO_EXCEPTIONS,
        block(
            ifs_no_return(
                cmp(var(PROVIDER), ConstantExpression.NULL),
                assigns(field(providerField), defaultHBaseProviderInstance()),
                assigns(field(providerField), var(PROVIDER))
            )
        )
    ));

    // generate one public delegating method per handler method, forwarding to
    // the provider field; parameter and return generics are copied so the
    // generated signatures match the handler's.
    for (MethodNode method : HBASE_CONTRIBUTION_HANDLER_CNODE.getMethods()) {
        // DELEGATING_METHODS must be sorted for binarySearch to be valid
        if (Arrays.binarySearch(DELEGATING_METHODS, method.getName()) < 0) continue;
        List<Expression> variables = new ArrayList<Expression>();
        Parameter[] parameters = new Parameter[method.getParameters().length];
        for (int i = 0; i < method.getParameters().length; i++) {
            Parameter p = method.getParameters()[i];
            parameters[i] = new Parameter(makeClassSafe(p.getType()), p.getName());
            parameters[i].getType().setGenericsTypes(p.getType().getGenericsTypes());
            variables.add(var(p.getName()));
        }
        ClassNode returnType = makeClassSafe(method.getReturnType());
        returnType.setGenericsTypes(method.getReturnType().getGenericsTypes());
        returnType.setGenericsPlaceHolder(method.getReturnType().isGenericsPlaceHolder());
        MethodNode newMethod = new MethodNode(
            method.getName(),
            ACC_PUBLIC,
            returnType,
            parameters,
            NO_EXCEPTIONS,
            returns(call(
                field(providerField),
                method.getName(),
                args(variables)))
        );
        newMethod.setGenericsTypes(method.getGenericsTypes());
        injectMethod(declaringClass, newMethod);
    }
}

/** @return an expression evaluating {@code DefaultHBaseProvider.getInstance()}. */
private static Expression defaultHBaseProviderInstance() {
    return call(DEFAULT_HBASE_PROVIDER_CNODE, "getInstance", NO_ARGS);
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.audit;

import org.apache.nifi.action.Action;
import org.apache.nifi.action.Component;
import org.apache.nifi.action.FlowChangeAction;
import org.apache.nifi.action.Operation;
import org.apache.nifi.action.details.FlowChangeConfigureDetails;
import org.apache.nifi.authorization.user.NiFiUser;
import org.apache.nifi.authorization.user.NiFiUserUtils;
import org.apache.nifi.parameter.Parameter;
import org.apache.nifi.parameter.ParameterContext;
import org.apache.nifi.web.api.dto.ParameterContextDTO;
import org.apache.nifi.web.api.dto.ParameterDTO;
import org.apache.nifi.web.dao.ParameterContextDAO;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Audits parameter context creation/removal and configuration changes.
 */
@Aspect
public class ParameterContextAuditor extends NiFiAuditor {

    private static final Logger logger = LoggerFactory.getLogger(ParameterContextAuditor.class);

    /**
     * Audits the creation of parameter contexts via createParameterContext().
     *
     * @param proceedingJoinPoint join point
     * @param parameterContextDTO dto
     * @param parameterContextDAO dao
     * @return context
     * @throws Throwable ex
     */
    @Around("within(org.apache.nifi.web.dao.ParameterContextDAO+) && "
            + "execution(org.apache.nifi.parameter.ParameterContext createParameterContext(org.apache.nifi.web.api.dto.ParameterContextDTO)) && "
            + "args(parameterContextDTO) && "
            + "target(parameterContextDAO)")
    public ParameterContext createParameterContextAdvice(ProceedingJoinPoint proceedingJoinPoint, ParameterContextDTO parameterContextDTO, ParameterContextDAO parameterContextDAO) throws Throwable {
        // create the parameter context
        ParameterContext parameterContext = (ParameterContext) proceedingJoinPoint.proceed();

        // get the current user
        NiFiUser user = NiFiUserUtils.getNiFiUser();

        // ensure the user was found
        if (user != null) {
            // create a parameter context action
            Collection<Action> actions = new ArrayList<>();

            // if no exceptions were thrown, add the creation action...
            final Action createAction = generateAuditRecord(parameterContext, Operation.Add);
            actions.add(createAction);

            // determine the updated values
            Map<String, String> updatedValues = extractConfiguredParameterContextValues(parameterContext, parameterContextDTO);

            // determine the actions performed in this request; the +1 ms keeps the
            // Configure actions ordered strictly after the Add action
            final Date actionTimestamp = new Date(createAction.getTimestamp().getTime() + 1);
            // typed emptyMap() instead of the raw Collections.EMPTY_MAP constant
            determineActions(user, parameterContext, actions, actionTimestamp, updatedValues, Collections.emptyMap());

            // save the actions
            saveActions(actions, logger);
        }

        return parameterContext;
    }

    /**
     * Audits the configuration of a parameter context via updateParameterContext().
     *
     * @param proceedingJoinPoint join point
     * @param parameterContextDTO dto
     * @param parameterContextDAO dao
     * @return parameter context
     * @throws Throwable ex
     */
    @Around("within(org.apache.nifi.web.dao.ParameterContextDAO+) && "
            + "execution(org.apache.nifi.parameter.ParameterContext updateParameterContext(org.apache.nifi.web.api.dto.ParameterContextDTO)) && "
            + "args(parameterContextDTO) && "
            + "target(parameterContextDAO)")
    public ParameterContext updateParameterContextAdvice(ProceedingJoinPoint proceedingJoinPoint, ParameterContextDTO parameterContextDTO, ParameterContextDAO parameterContextDAO) throws Throwable {
        // determine the initial values for each property/setting that's changing
        ParameterContext parameterContext = parameterContextDAO.getParameterContext(parameterContextDTO.getId());
        final Map<String, String> values = extractConfiguredParameterContextValues(parameterContext, parameterContextDTO);

        // perform the underlying update
        final ParameterContext updatedParameterContext = (ParameterContext) proceedingJoinPoint.proceed();

        // if no exceptions were thrown, re-read the context so the audited values
        // reflect its post-update state
        parameterContext = parameterContextDAO.getParameterContext(updatedParameterContext.getIdentifier());

        // get the current user
        NiFiUser user = NiFiUserUtils.getNiFiUser();

        // ensure the user was found
        if (user != null) {
            // determine the updated values
            Map<String, String> updatedValues = extractConfiguredParameterContextValues(parameterContext, parameterContextDTO);

            // create a parameter context action
            Date actionTimestamp = new Date();
            Collection<Action> actions = new ArrayList<>();

            // determine the actions performed in this request
            determineActions(user, parameterContext, actions, actionTimestamp, updatedValues, values);

            // ensure there are actions to record
            if (!actions.isEmpty()) {
                // save the actions
                saveActions(actions, logger);
            }
        }

        return updatedParameterContext;
    }

    /**
     * Extract configuration changes: emits one Configure action per key whose
     * value differs between {@code values} (before) and {@code updatedValues}
     * (after), masking sensitive parameter values.
     *
     * @param user the user
     * @param parameterContext the parameter context
     * @param actions actions list
     * @param actionTimestamp timestamp of the request
     * @param updatedValues the updated values
     * @param values the previous values
     */
    private void determineActions(final NiFiUser user, final ParameterContext parameterContext, final Collection<Action> actions, final Date actionTimestamp,
                                  final Map<String, String> updatedValues, final Map<String, String> values) {
        // go through each updated value
        for (final Map.Entry<String, String> updated : updatedValues.entrySet()) {
            final String key = updated.getKey();
            String newValue = updated.getValue();
            String oldValue = values.get(key);
            Operation operation = null;

            // determine the type of operation
            if (oldValue == null || newValue == null || !newValue.equals(oldValue)) {
                operation = Operation.Configure;
            }

            // create a configuration action accordingly
            if (operation != null) {
                // clear the value if this parameter is sensitive
                final Parameter parameter = parameterContext.getParameter(key).orElse(null);
                if (parameter != null && parameter.getDescriptor().isSensitive()) {
                    if (newValue != null) {
                        newValue = "********";
                    }
                    if (oldValue != null) {
                        oldValue = "********";
                    }
                }

                final FlowChangeConfigureDetails actionDetails = new FlowChangeConfigureDetails();
                actionDetails.setName(key);
                actionDetails.setValue(newValue);
                actionDetails.setPreviousValue(oldValue);

                // create a configuration action
                FlowChangeAction configurationAction = new FlowChangeAction();
                configurationAction.setUserIdentity(user.getIdentity());
                configurationAction.setOperation(operation);
                configurationAction.setTimestamp(actionTimestamp);
                configurationAction.setSourceId(parameterContext.getIdentifier());
                configurationAction.setSourceName(parameterContext.getName());
                configurationAction.setSourceType(Component.ParameterContext);
                configurationAction.setActionDetails(actionDetails);
                actions.add(configurationAction);
            }
        }
    }

    /**
     * Audits the removal of a parameter context via deleteParameterContext().
     *
     * @param proceedingJoinPoint join point
     * @param parameterContextId parameterContextId
     * @param parameterContextDAO dao
     * @throws Throwable ex
     */
    @Around("within(org.apache.nifi.web.dao.ParameterContextDAO+) && "
            + "execution(void deleteParameterContext(java.lang.String)) && "
            + "args(parameterContextId) && "
            + "target(parameterContextDAO)")
    public void removeParameterContextAdvice(ProceedingJoinPoint proceedingJoinPoint, String parameterContextId, ParameterContextDAO parameterContextDAO) throws Throwable {
        // get the parameter context before removing it
        ParameterContext parameterContext = parameterContextDAO.getParameterContext(parameterContextId);

        // remove the parameter context
        proceedingJoinPoint.proceed();

        // if no exceptions were thrown, audit the removal
        final Action action = generateAuditRecord(parameterContext, Operation.Remove);

        // save the actions
        if (action != null) {
            saveAction(action, logger);
        }
    }

    /**
     * Generates an audit record for the given operation on a ParameterContext.
     * Returns {@code null} when no authenticated user is available.
     *
     * @param parameterContext parameterContext
     * @param operation operation
     * @return action, or null
     */
    private Action generateAuditRecord(ParameterContext parameterContext, Operation operation) {
        FlowChangeAction action = null;

        // get the current user
        NiFiUser user = NiFiUserUtils.getNiFiUser();

        // ensure the user was found
        if (user != null) {
            action = new FlowChangeAction();
            action.setUserIdentity(user.getIdentity());
            action.setOperation(operation);
            action.setTimestamp(new Date());
            action.setSourceId(parameterContext.getIdentifier());
            action.setSourceName(parameterContext.getName());
            action.setSourceType(Component.ParameterContext);
        }

        return action;
    }

    /**
     * Extracts the values for the configured fields from the specified ParameterContext.
     * Each entry maps the audited field name (or parameter name) to its current value
     * in {@code parameterContext}; only fields present in the DTO are captured.
     */
    private Map<String, String> extractConfiguredParameterContextValues(ParameterContext parameterContext, ParameterContextDTO parameterContextDTO) {
        Map<String, String> values = new HashMap<>();

        // BUG FIX: the "Name" entry was guarded by getDescription() != null (a
        // copy/paste of the guard below), so name changes were dropped from the
        // audit trail whenever the description was absent, and recorded when
        // only the description was supplied. Guard on getName() instead.
        if (parameterContextDTO.getName() != null) {
            values.put("Name", parameterContext.getName());
        }
        if (parameterContextDTO.getDescription() != null) {
            values.put("Description", parameterContext.getDescription());
        }
        if (parameterContextDTO.getParameters() != null) {
            parameterContextDTO.getParameters().forEach(parameterEntity -> {
                final ParameterDTO parameterDTO = parameterEntity.getParameter();
                final Parameter parameter = parameterContext.getParameter(parameterDTO.getName()).orElse(null);
                if (parameter == null) {
                    // parameter was removed (or never existed) -- record its absence
                    values.put(parameterDTO.getName(), null);
                } else {
                    values.put(parameterDTO.getName(), parameter.getValue());
                }
            });
        }
        if (!parameterContext.getInheritedParameterContexts().isEmpty()) {
            values.put("Inherited Parameter Contexts", parameterContext.getInheritedParameterContexts()
                    .stream().map(pc -> pc.getIdentifier()).collect(Collectors.joining(", ")));
        }

        return values;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.rel.logical; import org.apache.calcite.linq4j.Ord; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.rel.RelCollation; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.Window; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexFieldCollation; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexLocalRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexOver; import org.apache.calcite.rex.RexProgram; import org.apache.calcite.rex.RexShuttle; import org.apache.calcite.rex.RexWindow; import org.apache.calcite.rex.RexWindowBound; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Pair; import com.google.common.base.Objects; import com.google.common.base.Predicate; import com.google.common.collect.Iterables; import com.google.common.collect.LinkedListMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import java.util.AbstractList; import java.util.ArrayList; 
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Sub-class of {@link org.apache.calcite.rel.core.Window}
 * not targeted at any particular engine or calling convention.
 */
public final class LogicalWindow extends Window {
  /**
   * Creates a LogicalWindow.
   *
   * @param cluster Cluster
   * @param traits Trait set
   * @param child Input relational expression
   * @param constants List of constants that are additional inputs
   * @param rowType Output row type
   * @param groups Windows
   */
  public LogicalWindow(RelOptCluster cluster, RelTraitSet traits, RelNode child,
      List<RexLiteral> constants, RelDataType rowType, List<Group> groups) {
    super(cluster, traits, child, constants, rowType, groups);
  }

  @Override public LogicalWindow copy(RelTraitSet traitSet, List<RelNode> inputs) {
    return new LogicalWindow(getCluster(), traitSet, sole(inputs), constants,
        rowType, groups);
  }

  /**
   * Creates a LogicalWindow by extracting the {@link RexOver} expressions out
   * of {@code program}, grouping them by identical window specification, and
   * projecting the results back into the program's output row type.
   */
  public static RelNode create(RelOptCluster cluster, RelTraitSet traitSet,
      RelNode child, final RexProgram program) {
    final RelDataType outRowType = program.getOutputRowType();
    // Build a list of distinct groups, partitions and aggregate
    // functions.
    final Multimap<WindowKey, RexOver> windowMap = LinkedListMultimap.create();

    final int inputFieldCount = child.getRowType().getFieldCount();

    final Map<RexLiteral, RexInputRef> constantPool =
        new HashMap<RexLiteral, RexInputRef>();
    final List<RexLiteral> constants = new ArrayList<RexLiteral>();

    // Identify constants in the expression tree and replace them with
    // references to newly generated constant pool.
    // NOTE: this shuttle mutates constantPool/constants as a side effect;
    // constants are assigned input refs AFTER the real input fields.
    RexShuttle replaceConstants = new RexShuttle() {
      @Override public RexNode visitLiteral(RexLiteral literal) {
        RexInputRef ref = constantPool.get(literal);
        if (ref != null) {
          return ref;
        }
        constants.add(literal);
        ref = new RexInputRef(constantPool.size() + inputFieldCount,
            literal.getType());
        constantPool.put(literal, ref);
        return ref;
      }
    };

    // Build a list of groups, partitions, and aggregate functions. Each
    // aggregate function will add its arguments as outputs of the input
    // program.
    for (RexNode agg : program.getExprList()) {
      if (agg instanceof RexOver) {
        RexOver over = (RexOver) agg;
        over = (RexOver) over.accept(replaceConstants);
        addWindows(windowMap, over, inputFieldCount);
      }
    }

    final Map<RexOver, Window.RexWinAggCall> aggMap =
        new HashMap<RexOver, Window.RexWinAggCall>();
    List<Group> groups = new ArrayList<Group>();
    // One Group per distinct WindowKey; aggMap.size() gives each agg call a
    // unique, increasing ordinal across all groups.
    for (Map.Entry<WindowKey, Collection<RexOver>> entry
        : windowMap.asMap().entrySet()) {
      final WindowKey windowKey = entry.getKey();
      final List<RexWinAggCall> aggCalls = new ArrayList<RexWinAggCall>();
      for (RexOver over : entry.getValue()) {
        final RexWinAggCall aggCall =
            new RexWinAggCall(over.getAggOperator(), over.getType(),
                toInputRefs(over.operands), aggMap.size());
        aggCalls.add(aggCall);
        aggMap.put(over, aggCall);
      }
      RexShuttle toInputRefs = new RexShuttle() {
        @Override public RexNode visitLocalRef(RexLocalRef localRef) {
          return new RexInputRef(localRef.getIndex(), localRef.getType());
        }
      };
      groups.add(
          new Group(windowKey.groupSet, windowKey.isRows,
              windowKey.lowerBound.accept(toInputRefs),
              windowKey.upperBound.accept(toInputRefs),
              windowKey.orderKeys, aggCalls));
    }

    // Figure out the type of the inputs to the output program.
    // They are: the inputs to this rel, followed by the outputs of
    // each window.
    final List<Window.RexWinAggCall> flattenedAggCallList =
        new ArrayList<Window.RexWinAggCall>();
    List<Map.Entry<String, RelDataType>> fieldList =
        new ArrayList<Map.Entry<String, RelDataType>>(
            child.getRowType().getFieldList());
    final int offset = fieldList.size();

    // Use better field names for agg calls that are projected.
    Map<Integer, String> fieldNames = new HashMap<Integer, String>();
    for (Ord<RexLocalRef> ref : Ord.zip(program.getProjectList())) {
      final int index = ref.e.getIndex();
      if (index >= offset) {
        fieldNames.put(index - offset, outRowType.getFieldNames().get(ref.i));
      }
    }

    for (Ord<Group> window : Ord.zip(groups)) {
      for (Ord<RexWinAggCall> over : Ord.zip(window.e.aggCalls)) {
        // Add the k-th over expression of
        // the i-th window to the output of the program.
        String name = fieldNames.get(over.i);
        if (name == null || name.startsWith("$")) {
          name = "w" + window.i + "$o" + over.i;
        }
        fieldList.add(Pair.of(name, over.e.getType()));
        flattenedAggCallList.add(over.e);
      }
    }
    final RelDataType intermediateRowType =
        cluster.getTypeFactory().createStructType(fieldList);

    // The output program is the windowed agg's program, combined with
    // the output calc (if it exists).
    // NOTE(review): this shuttle does not appear to be used anywhere below in
    // this version of the method -- confirm whether it is dead code.
    RexShuttle shuttle = new RexShuttle() {
      public RexNode visitOver(RexOver over) {
        // Look up the aggCall which this expr was translated to.
        final Window.RexWinAggCall aggCall = aggMap.get(over);
        assert aggCall != null;
        assert RelOptUtil.eq("over", over.getType(), "aggCall",
            aggCall.getType(), true);

        // Find the index of the aggCall among all partitions of all
        // groups.
        final int aggCallIndex = flattenedAggCallList.indexOf(aggCall);
        assert aggCallIndex >= 0;

        // Replace expression with a reference to the window slot.
        final int index = inputFieldCount + aggCallIndex;
        assert RelOptUtil.eq("over", over.getType(), "intermed",
            intermediateRowType.getFieldList().get(index).getType(), true);
        return new RexInputRef(index, over.getType());
      }

      public RexNode visitLocalRef(RexLocalRef localRef) {
        final int index = localRef.getIndex();
        if (index < inputFieldCount) {
          // Reference to input field.
          return localRef;
        }
        return new RexLocalRef(flattenedAggCallList.size() + index,
            localRef.getType());
      }
    };

    // TODO: The order that the "over" calls occur in the groups and
    // partitions may not match the order in which they occurred in the
    // original expression. We should add a project to permute them.
    LogicalWindow window =
        new LogicalWindow(cluster, traitSet, child, constants,
            intermediateRowType, groups);

    return RelOptUtil.createProject(window,
        toInputRefs(program.getProjectList()), outRowType.getFieldNames());
  }

  /** Lazily converts {@link RexLocalRef}s to {@link RexInputRef}s with the
   * same index and type; existing input refs are passed through unchanged. */
  private static List<RexNode> toInputRefs(
      final List<? extends RexNode> operands) {
    return new AbstractList<RexNode>() {
      public int size() {
        return operands.size();
      }

      public RexNode get(int index) {
        final RexNode operand = operands.get(index);
        if (operand instanceof RexInputRef) {
          return operand;
        }
        assert operand instanceof RexLocalRef;
        final RexLocalRef ref = (RexLocalRef) operand;
        return new RexInputRef(ref.getIndex(), ref.getType());
      }
    };
  }

  /** Group specification. All windowed aggregates over the same window
   * (regardless of how it is specified, in terms of a named window or specified
   * attribute by attribute) will end up with the same window key.
   */
  private static class WindowKey {
    private final ImmutableBitSet groupSet;
    private final RelCollation orderKeys;
    private final boolean isRows;
    private final RexWindowBound lowerBound;
    private final RexWindowBound upperBound;

    public WindowKey(ImmutableBitSet groupSet, RelCollation orderKeys,
        boolean isRows, RexWindowBound lowerBound, RexWindowBound upperBound) {
      this.groupSet = groupSet;
      this.orderKeys = orderKeys;
      this.isRows = isRows;
      this.lowerBound = lowerBound;
      this.upperBound = upperBound;
    }

    @Override public int hashCode() {
      return com.google.common.base.Objects.hashCode(groupSet, orderKeys,
          isRows, lowerBound, upperBound);
    }

    @Override public boolean equals(Object obj) {
      // consistent with hashCode: same five components compared
      return obj == this
          || obj instanceof WindowKey
          && groupSet.equals(((WindowKey) obj).groupSet)
          && orderKeys.equals(((WindowKey) obj).orderKeys)
          && Objects.equal(lowerBound, ((WindowKey) obj).lowerBound)
          && Objects.equal(upperBound, ((WindowKey) obj).upperBound)
          && isRows == ((WindowKey) obj).isRows;
    }
  }

  /** Registers {@code over} in {@code windowMap} under the WindowKey derived
   * from its window specification, dropping constant ORDER BY / PARTITION BY
   * keys (which, after constant pooling, reference slots past
   * {@code inputFieldCount}). */
  private static void addWindows(Multimap<WindowKey, RexOver> windowMap,
      RexOver over, final int inputFieldCount) {
    final RexWindow aggWindow = over.getWindow();

    // Look up or create a window.
    RelCollation orderKeys = getCollation(Lists.newArrayList(
        Iterables.filter(aggWindow.orderKeys,
            new Predicate<RexFieldCollation>() {
              public boolean apply(RexFieldCollation rexFieldCollation) {
                // If ORDER BY references constant (i.e. RexInputRef),
                // then we can ignore such ORDER BY key.
                return rexFieldCollation.left instanceof RexLocalRef;
              }
            })));
    ImmutableBitSet groupSet =
        ImmutableBitSet.of(getProjectOrdinals(aggWindow.partitionKeys));
    final int groupLength = groupSet.length();
    if (inputFieldCount < groupLength) {
      // If PARTITION BY references constant, we can ignore such partition key.
      // All the inputs after inputFieldCount are literals, thus we can clear.
      groupSet =
          groupSet.except(ImmutableBitSet.range(inputFieldCount, groupLength));
    }
    WindowKey windowKey =
        new WindowKey(groupSet, orderKeys, aggWindow.isRows(),
            aggWindow.getLowerBound(), aggWindow.getUpperBound());
    windowMap.put(windowKey, over);
  }
}

// End LogicalWindow.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.action.percolate; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest; import org.elasticsearch.action.support.single.shard.TransportShardSingleOperationAction; import org.elasticsearch.cluster.ClusterService; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.StringText; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.percolator.PercolatorService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import 
java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Shard-level transport action for multi-percolate: executes a batch of
 * percolate requests against a single shard and reports per-item results
 * (response or error text), keyed by the item's original slot.
 */
public class TransportShardMultiPercolateAction extends TransportShardSingleOperationAction<TransportShardMultiPercolateAction.Request, TransportShardMultiPercolateAction.Response> {

    private final PercolatorService percolatorService;

    private static final String ACTION_NAME = MultiPercolateAction.NAME + "[shard]";

    @Inject
    public TransportShardMultiPercolateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
                                              TransportService transportService, PercolatorService percolatorService,
                                              ActionFilters actionFilters) {
        super(settings, ACTION_NAME, threadPool, clusterService, transportService, actionFilters, Request.class, ThreadPool.Names.PERCOLATE);
        this.percolatorService = percolatorService;
    }

    @Override
    protected boolean isSubAction() {
        // this action is only dispatched by the top-level multi-percolate action
        return true;
    }

    @Override
    protected Response newResponse() {
        return new Response();
    }

    @Override
    protected boolean resolveIndex() {
        // index names were already resolved by the parent multi-percolate action
        return false;
    }

    @Override
    protected ShardIterator shards(ClusterState state, InternalRequest request) throws ElasticsearchException {
        return clusterService.operationRouting().getShards(
                state, request.concreteIndex(), request.request().shardId(), request.request().preference
        );
    }

    /**
     * Percolates every item of the request against this shard. A failure on one
     * item becomes an error entry for that slot instead of failing the whole
     * batch, except shard-not-available errors, which are rethrown so the
     * caller can retry on another copy.
     */
    @Override
    protected Response shardOperation(Request request, ShardId shardId) throws ElasticsearchException {
        // TODO: Look into combining the shard req's docs into one in memory index.
        Response response = new Response();
        response.items = new ArrayList<>(request.items.size());
        for (Request.Item item : request.items) {
            Response.Item responseItem;
            int slot = item.slot;
            try {
                responseItem = new Response.Item(slot, percolatorService.percolate(item.request));
            } catch (Throwable t) {
                if (TransportActions.isShardNotAvailableException(t)) {
                    throw (ElasticsearchException) t;
                } else {
                    logger.debug("{} failed to multi percolate", t, request.shardId());
                    responseItem = new Response.Item(slot, new StringText(ExceptionsHelper.detailedMessage(t)));
                }
            }
            response.items.add(responseItem);
        }
        return response;
    }

    /**
     * Shard-level request: a list of percolate items targeted at one shard.
     * readFrom/writeTo must stay mirror images of each other — they define the
     * wire format.
     */
    public static class Request extends SingleShardOperationRequest implements IndicesRequest {

        private int shardId;
        private String preference;
        private List<Item> items;

        Request() {
        }

        Request(MultiPercolateRequest multiPercolateRequest, String concreteIndex, int shardId, String preference) {
            super(multiPercolateRequest, concreteIndex);
            this.shardId = shardId;
            this.preference = preference;
            this.items = new ArrayList<>();
        }

        @Override
        public String[] indices() {
            // union of the indices referenced by every item (may contain duplicates)
            List<String> indices = new ArrayList<>();
            for (Item item : items) {
                Collections.addAll(indices, item.request.indices());
            }
            return indices.toArray(new String[indices.size()]);
        }

        public int shardId() {
            return shardId;
        }

        public void add(Item item) {
            items.add(item);
        }

        public List<Item> items() {
            return items;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            shardId = in.readVInt();
            preference = in.readOptionalString();
            int size = in.readVInt();
            items = new ArrayList<>(size);
            for (int i = 0; i < size; i++) {
                int slot = in.readVInt();
                OriginalIndices originalIndices = OriginalIndices.readOriginalIndices(in);
                PercolateShardRequest shardRequest = new PercolateShardRequest(new ShardId(index, shardId), originalIndices);
                shardRequest.documentType(in.readString());
                shardRequest.source(in.readBytesReference());
                shardRequest.docSource(in.readBytesReference());
                shardRequest.onlyCount(in.readBoolean());
                Item item = new Item(slot, shardRequest);
                items.add(item);
            }
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeVInt(shardId);
            out.writeOptionalString(preference);
            out.writeVInt(items.size());
            // field order must match readFrom above exactly
            for (Item item : items) {
                out.writeVInt(item.slot);
                OriginalIndices.writeOriginalIndices(item.request.originalIndices(), out);
                out.writeString(item.request.documentType());
                out.writeBytesReference(item.request.source());
                out.writeBytesReference(item.request.docSource());
                out.writeBoolean(item.request.onlyCount());
            }
        }

        /** One percolate request plus the slot it occupied in the original
         * multi-percolate request, so responses can be reassembled in order. */
        static class Item {

            private final int slot;
            private final PercolateShardRequest request;

            public Item(int slot, PercolateShardRequest request) {
                this.slot = slot;
                this.request = request;
            }

            public int slot() {
                return slot;
            }

            public PercolateShardRequest request() {
                return request;
            }

        }

    }

    /**
     * Shard-level response: per-slot result items, each carrying either a
     * percolate response or an error text (never both).
     */
    public static class Response extends ActionResponse {

        private List<Item> items;

        public List<Item> items() {
            return items;
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeVInt(items.size());
            for (Item item : items) {
                out.writeVInt(item.slot);
                if (item.response != null) {
                    // boolean flag tells readFrom which variant follows
                    out.writeBoolean(true);
                    item.response.writeTo(out);
                } else {
                    out.writeBoolean(false);
                    out.writeText(item.error);
                }
            }
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            int size = in.readVInt();
            items = new ArrayList<>(size);
            for (int i = 0; i < size; i++) {
                int slot = in.readVInt();
                if (in.readBoolean()) {
                    PercolateShardResponse shardResponse = new PercolateShardResponse();
                    shardResponse.readFrom(in);
                    items.add(new Item(slot, shardResponse));
                } else {
                    items.add(new Item(slot, in.readText()));
                }
            }
        }

        /** Either a successful shard response or an error text for one slot. */
        public static class Item {

            private final int slot;
            private final PercolateShardResponse response;
            private final Text error;

            public Item(Integer slot, PercolateShardResponse response) {
                this.slot = slot;
                this.response = response;
                this.error = null;
            }

            public Item(Integer slot, Text error) {
                this.slot = slot;
                this.error = error;
                this.response = null;
            }

            public int slot() {
                return slot;
            }

            public PercolateShardResponse response() {
                return response;
            }

            public Text error() {
                return error;
            }

            public boolean failed() {
                return error != null;
            }

        }

    }

}
package com.github.ayltai.foscam.client.camera;

import java.util.concurrent.TimeUnit;

import android.graphics.Bitmap;
import android.support.annotation.NonNull;
import android.support.annotation.UiThread;
import android.support.annotation.VisibleForTesting;

import com.github.ayltai.foscam.client.Constants;
import com.github.ayltai.foscam.client.Presenter;
import com.github.ayltai.foscam.client.R;
import com.github.ayltai.foscam.client.data.Camera;
import com.github.ayltai.foscam.client.util.ImageUtils;
import com.github.ayltai.foscam.sdk.BaseResponse;
import com.github.ayltai.foscam.sdk.ResultCode;
import com.github.ayltai.foscam.sdk.control.ControlService;
import com.github.ayltai.foscam.sdk.device.DeviceService;
import com.github.ayltai.foscam.sdk.video.VideoService;

import rx.Observable;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
import rx.subscriptions.CompositeSubscription;

/**
 * Presenter for a single camera screen: wires view interactions
 * (pan/tilt/zoom, light, snapshot) to the camera's control, device, and video
 * services, and streams snapshots into the view.
 */
public /* final */ class CameraPresenter extends Presenter<CameraPresenter.View> {
    /** Contract for the camera screen; UI mutations are {@code @UiThread},
     * the {@code Observable<Void>} getters emit once per user action. */
    public interface View extends Presenter.View {
        @UiThread
        void bind(@NonNull Camera camera);

        @UiThread
        void showPanTiltControls(boolean show);

        @UiThread
        void showZoomControls(boolean show);

        @UiThread
        void updateImage(@NonNull Bitmap bitmap);

        @UiThread
        void saveImage();

        @NonNull
        Observable<Void> moveLeft();

        @NonNull
        Observable<Void> moveRight();

        @NonNull
        Observable<Void> moveUp();

        @NonNull
        Observable<Void> moveDown();

        @NonNull
        Observable<Void> goHome();

        @NonNull
        Observable<Void> toggleLight();

        @NonNull
        Observable<Void> snapshot();

        @NonNull
        Observable<Void> zoomIn();

        @NonNull
        Observable<Void> zoomOut();

        @UiThread
        void showErrorMessage(@NonNull CharSequence errorMessage);
    }

    // camera whose services this presenter drives
    private final Camera camera;

    //region Services
    // lazily created in onViewAttached and kept across detach/attach cycles
    private ControlService controlService;
    private DeviceService  deviceService;
    private VideoService   videoService;
    //endregion

    // holds every Rx subscription made while a view is attached;
    // torn down in onViewDetached
    private CompositeSubscription subscriptions;

    public CameraPresenter(@NonNull final Camera camera) {
        this.camera = camera;
    }

    //region Lifecycle
    /**
     * Binds the view, (lazily) creates the camera services, and subscribes
     * every view action stream to its corresponding camera request.
     */
    @Override
    public void onViewAttached(@NonNull final CameraPresenter.View view) {
        super.onViewAttached(view);

        // createService is presumably inherited from Presenter -- it builds an
        // SDK service bound to this camera's address and credentials
        if (this.controlService == null) this.controlService = this.createService(camera.getAddress(), camera.getPort(), camera.getUserName(), camera.getPassword(), ControlService.class);
        if (this.deviceService == null) this.deviceService = this.createService(camera.getAddress(), camera.getPort(), camera.getUserName(), camera.getPassword(), DeviceService.class);
        if (this.videoService == null) this.videoService = this.createService(camera.getAddress(), camera.getPort(), camera.getUserName(), camera.getPassword(), VideoService.class);

        this.view.bind(this.camera);

        if (this.subscriptions == null) this.subscriptions = new CompositeSubscription();

        this.checkDeviceCapability();
        this.subscribeToImageUpdates();

        // movement actions issue a start request followed by stopMoving (5-arg overload);
        // the remaining actions are single requests (4-arg overload)
        this.subscribeTo(this.view.moveLeft(), "Move left", this.controlService.moveLeft(), this.controlService.stopMoving(), R.string.error_move_left);
        this.subscribeTo(this.view.moveRight(), "Move right", this.controlService.moveRight(), this.controlService.stopMoving(), R.string.error_move_right);
        this.subscribeTo(this.view.moveUp(), "Move up", this.controlService.moveUp(), this.controlService.stopMoving(), R.string.error_move_up);
        this.subscribeTo(this.view.moveDown(), "Move down", this.controlService.moveDown(), this.controlService.stopMoving(), R.string.error_move_down);
        this.subscribeTo(this.view.goHome(), "Go home", this.controlService.resetPosition(), R.string.error_go_home);
        this.subscribeTo(this.view.zoomIn(), "Zoom in", this.controlService.zoomIn(), R.string.error_zoom_in);
        this.subscribeTo(this.view.zoomOut(), "Zoom out", this.controlService.zoomOut(), R.string.error_zoom_out);
        this.subscribeToToggleLights();
        this.subscribeToSnapshots();
    }

    /**
     * Unsubscribes all active streams; services are intentionally kept so
     * re-attachment does not re-authenticate.
     */
    @Override
    public void onViewDetached() {
        super.onViewDetached();

        if (this.subscriptions != null && this.subscriptions.hasSubscriptions()) {
            this.subscriptions.unsubscribe();
            this.subscriptions = null;
        }
    }
    //endregion
/**
 * Queries the device for its pan/tilt and zoom capabilities and shows or
 * hides the matching view controls accordingly. Failures are only logged —
 * the controls simply keep their current visibility.
 */
private void checkDeviceCapability() {
    this.subscriptions.add(this.deviceService.getDeviceInfo()
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(
            deviceInfo -> {
                this.view.showPanTiltControls(deviceInfo.canPanTilt());
                this.view.showZoomControls(deviceInfo.canZoom());
            },
            error -> this.log().w(this.getClass().getSimpleName(), error.getMessage(), error)));
}

/**
 * Wires a view action to a single camera request. Each emission of
 * {@code action} fires {@code request}; a non-success result code or a
 * transport error is logged and surfaced to the user.
 *
 * @param action          view event stream that triggers the request
 * @param log             debug message logged each time the action fires
 * @param request         the camera request to execute
 * @param errorResourceId string resource shown when the camera reports failure
 */
private void subscribeTo(@NonNull final Observable<Void> action, @NonNull final String log, @NonNull final Observable<BaseResponse> request, final int errorResourceId) {
    this.subscriptions.add(action.doOnNext(dummy -> this.log().d(this.getClass().getSimpleName(), log)).subscribe(dummy -> {
        request
            .observeOn(AndroidSchedulers.mainThread())
            .subscribe(
                response -> {
                    if (response.getResultCode() != ResultCode.SUCCESS) {
                        this.log().w(this.getClass().getSimpleName(), response.getResultCode().name());
                        this.view.showErrorMessage(this.view.getContext().getText(errorResourceId));
                    }
                },
                error -> {
                    this.log().w(this.getClass().getSimpleName(), error.getMessage(), error);
                    this.view.showErrorMessage(error.getMessage());
                });
    }));
}

/**
 * Wires a view action to a start/stop request pair: the start request fires
 * immediately, and if it succeeded the stop request follows after
 * {@link Constants#MOVE_DURATION} seconds (the delay applies to the
 * flat-mapped stop subscription). Start failures are shown to the user;
 * stop failures are logged (and transport errors also shown).
 *
 * @param action          view event stream that triggers the sequence
 * @param log             debug message logged each time the action fires
 * @param startRequest    request that begins the movement
 * @param stopRequest     request that ends the movement
 * @param errorResourceId string resource shown when the start request fails
 */
private void subscribeTo(@NonNull final Observable<Void> action, @NonNull final String log, @NonNull final Observable<BaseResponse> startRequest, final Observable<BaseResponse> stopRequest, final int errorResourceId) {
    this.subscriptions.add(action.doOnNext(dummy -> this.log().d(this.getClass().getSimpleName(), log)).subscribe(dummy -> {
        startRequest
            .filter(response -> {
                // Only successful starts proceed to the stop request.
                if (response.getResultCode() == ResultCode.SUCCESS) return true;
                this.log().w(this.getClass().getSimpleName(), response.getResultCode().name());
                this.view.showErrorMessage(this.view.getContext().getText(errorResourceId));
                return false;
            })
            .doOnError(error -> {
                this.log().w(this.getClass().getSimpleName(), error.getMessage(), error);
                this.view.showErrorMessage(error.getMessage());
            })
            .flatMap(response -> stopRequest)
            .delaySubscription(Constants.MOVE_DURATION, TimeUnit.SECONDS)
            .subscribe(
                response -> {
                    if (response.getResultCode() != ResultCode.SUCCESS) this.log().w(this.getClass().getSimpleName(), response.getResultCode().name());
                },
                error -> {
                    this.log().w(this.getClass().getSimpleName(), error.getMessage(), error);
                    this.view.showErrorMessage(error.getMessage());
                }
            );
    }));
}

/**
 * Continuously polls the video service for snapshots and pushes each decoded
 * bitmap to the view. {@code repeat()} re-subscribes after every completed
 * snapshot and {@code retry()} re-subscribes after errors, so the stream
 * runs until the composite subscription is unsubscribed.
 */
@VisibleForTesting
/* private */ void subscribeToImageUpdates() {
    this.subscriptions.add(this.videoService.snapshot()
        .doOnNext(bytes -> ImageUtils.toBitmap(bytes)
            .observeOn(AndroidSchedulers.mainThread())
            .subscribeOn(Schedulers.io())
            .subscribe(bitmap -> this.view.updateImage(bitmap),
                error -> this.log().e(this.getClass().getSimpleName(), error.getMessage(), error)))
        .repeat()
        .doOnError(error -> this.log().e(this.getClass().getSimpleName(), error.getMessage(), error))
        .retry()
        .subscribe());
}

/**
 * Toggles the infra-red LED on each toggle-light view event: reads the
 * current LED state and issues the opposite command.
 */
@VisibleForTesting
/* private */ void subscribeToToggleLights() {
    this.subscriptions.add(this.view.toggleLight().doOnNext(dummy -> this.log().d(this.getClass().getSimpleName(), "Toggle light")).subscribe(dummy -> {
        this.deviceService.getDeviceSettings()
            .subscribe(deviceSettings -> {
                (deviceSettings.isInfraRedLEDTurnedOn() ? this.deviceService.turnOffInfraRedLED() : this.deviceService.turnOnInfraRedLED())
                    .observeOn(AndroidSchedulers.mainThread())
                    .subscribe(
                        infraRedLEDStatus -> {
                            // BUG FIX: the original condition was
                            // `... || infraRedLEDStatus.isSuccess()`, which
                            // reported an error precisely when the toggle
                            // SUCCEEDED. The error path must trigger on a
                            // non-success result code or isSuccess() == false.
                            if (infraRedLEDStatus.getResultCode() != ResultCode.SUCCESS || !infraRedLEDStatus.isSuccess()) {
                                this.log().w(this.getClass().getSimpleName(), infraRedLEDStatus.getResultCode().name() + ": isSuccess = " + infraRedLEDStatus.isSuccess());
                                this.view.showErrorMessage(this.view.getContext().getText(R.string.error_toggle_light));
                            }
                        },
                        error -> {
                            this.log().w(this.getClass().getSimpleName(), error.getMessage(), error);
                            this.view.showErrorMessage(error.getMessage());
                        });
            });
    }));
}

/**
 * Asks the view to persist the current image whenever the snapshot action
 * fires; failures are logged and shown to the user.
 */
@VisibleForTesting
/* private */ void subscribeToSnapshots() {
    this.subscriptions.add(this.view.snapshot()
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(
            dummy -> this.view.saveImage(),
            error -> {
                this.log().w(this.getClass().getSimpleName(), error.getMessage(), error);
                this.view.showErrorMessage(error.getMessage());
            }));
}
}
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.android.toolchain.ndk.impl;

import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeThat;
import static org.junit.Assume.assumeTrue;

import com.facebook.buck.android.AssumeAndroidPlatform;
import com.facebook.buck.android.toolchain.ndk.NdkCompilerType;
import com.facebook.buck.android.toolchain.ndk.NdkCxxRuntime;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.model.impl.BuildTargetPaths;
import com.facebook.buck.cxx.CxxDescriptionEnhancer;
import com.facebook.buck.io.file.MorePaths;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.TestProjectFilesystems;
import com.facebook.buck.testutil.TemporaryPaths;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

/** Integration tests exercising the NDK C++ platforms across compiler/runtime/arch combos. */
@RunWith(Parameterized.class)
public class NdkCxxPlatformIntegrationTest {

  @Parameterized.Parameters(name = "{0},{1},{2}")
  public static Collection<Object[]> data() {
    ImmutableList.Builder<String> architectures = ImmutableList.builder();
    // "arm" only exists on NDKs that still ship the armeabi ABI.
    if (AssumeAndroidPlatform.isArmAvailable()) {
      architectures.add("arm");
    }
    architectures.add("armv7", "arm64", "x86", "x86_64");
    List<Object[]> data = new ArrayList<>();
    for (String arch : architectures.build()) {
      data.add(new Object[] {NdkCompilerType.GCC, NdkCxxRuntime.GNUSTL, arch});
      // We don't support 64-bit clang yet.
      if (!arch.equals("arm64") && !arch.equals("x86_64")) {
        data.add(new Object[] {NdkCompilerType.CLANG, NdkCxxRuntime.GNUSTL, arch});
        data.add(new Object[] {NdkCompilerType.CLANG, NdkCxxRuntime.LIBCXX, arch});
      }
    }
    return data;
  }

  @Parameterized.Parameter public NdkCompilerType compiler;

  @Parameterized.Parameter(value = 1)
  public NdkCxxRuntime cxxRuntime;

  @Parameterized.Parameter(value = 2)
  public String arch;

  @Rule public TemporaryPaths tmp = new TemporaryPaths("ndk-test", true);

  @Rule public TemporaryPaths tmp_long_pwd = new TemporaryPaths("ndk-test-long-pwd", true);

  private String architectures;

  /** Creates a workspace for {@code name} with a .buckconfig matching the current parameters. */
  private ProjectWorkspace setupWorkspace(String name, TemporaryPaths tmp) throws IOException {
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, name, tmp);
    workspace.setUp();
    workspace.writeContentsToPath(
        String.format(
            "[ndk]\n"
                + "  compiler = %s\n"
                + "  gcc_version = 4.9\n"
                + "  cxx_runtime = %s\n"
                + "  cpu_abis = "
                + architectures
                + "\n"
                + "  app_platform = android-21\n",
            compiler, cxxRuntime),
        ".buckconfig");
    return workspace;
  }

  /** Returns the detected NDK root, asserting that it actually exists on disk. */
  private Path getNdkRoot() {
    ProjectFilesystem projectFilesystem =
        TestProjectFilesystems.createProjectFilesystem(Paths.get(".").toAbsolutePath());
    Path ndkDir = AndroidNdkHelper.detectAndroidNdk(projectFilesystem).get().getNdkRootPath();
    assertTrue(java.nio.file.Files.exists(ndkDir));
    return ndkDir;
  }

  @Before
  public void setUp() {
    AssumeAndroidPlatform.assumeNdkIsAvailable();
    if (AssumeAndroidPlatform.isArmAvailable()) {
      architectures = "arm, armv7, arm64, x86, x86_64";
    } else {
      architectures = "armv7, arm64, x86, x86_64";
    }
  }

  @Test
  public void runtimeSupportsStl() throws IOException {
    assumeTrue(
        "libcxx is unsupported with this ndk",
        NdkCxxPlatforms.isSupportedConfiguration(getNdkRoot(), cxxRuntime));
    ProjectWorkspace workspace = setupWorkspace("runtime_stl", tmp);
    workspace.runBuckCommand("build", String.format("//:main#android-%s", arch)).assertSuccess();
  }

  @Test
  public void changedPlatformTarget() throws IOException {
    assumeTrue(
        "libcxx is unsupported with this ndk",
        NdkCxxPlatforms.isSupportedConfiguration(getNdkRoot(), cxxRuntime));

    // 64-bit only works with platform 21, so we can't change the platform to anything else.
    assumeThat(
        "skip this test for 64-bit, for now",
        arch,
        not(anyOf(equalTo("arm64"), equalTo("x86_64"))));

    ProjectWorkspace workspace = setupWorkspace("ndk_app_platform", tmp);

    BuildTarget target = BuildTargetFactory.newInstance(String.format("//:main#android-%s", arch));
    BuildTarget linkTarget = CxxDescriptionEnhancer.createCxxLinkTarget(target, Optional.empty());

    workspace.runBuckCommand("build", target.toString()).assertSuccess();
    workspace.getBuildLog().assertTargetBuiltLocally(linkTarget.toString());

    // Change the app platform and verify that our rulekey has changed.
    workspace.writeContentsToPath("[ndk]\n  app_platform = android-17", ".buckconfig");
    workspace.runBuckCommand("build", target.toString()).assertSuccess();
    workspace.getBuildLog().assertTargetBuiltLocally(linkTarget.toString());
  }

  @Test
  public void testWorkingDirectoryAndNdkHeaderPathsAreSanitized() throws IOException {
    String buckConfig =
        "[ndk]\n"
            + "  cpu_abis = "
            + architectures
            + "\n"
            + "  gcc_version = 4.9\n"
            + "  app_platform = android-21\n";
    ProjectWorkspace workspace = setupWorkspace("ndk_debug_paths", tmp);
    ProjectFilesystem filesystem =
        TestProjectFilesystems.createProjectFilesystem(workspace.getDestPath());
    workspace.writeContentsToPath(buckConfig, ".buckconfig");

    BuildTarget target =
        BuildTargetFactory.newInstance(String.format("//:lib#android-%s,static", arch));
    workspace.runBuckBuild(target.getFullyQualifiedName()).assertSuccess();
    Path lib =
        workspace.getPath(
            BuildTargetPaths.getGenPath(
                filesystem, target, "%s/lib" + target.getShortName() + ".a"));
    String contents = MorePaths.asByteSource(lib).asCharSource(Charsets.ISO_8859_1).read();

    // Verify that the working directory is sanitized.
    assertFalse(contents.contains(tmp.getRoot().toString()));

    // Verify that we don't have any references to the build toolchain in the debug info.
    for (NdkCxxPlatforms.Host host : NdkCxxPlatforms.Host.values()) {
      assertFalse(contents.contains(host.toString()));
    }

    // Verify that the NDK path is sanitized.
    assertFalse(contents.contains(getNdkRoot().toString()));

    // Run another build in a location with a longer PWD, to verify that this doesn't affect output.
    ProjectWorkspace longPwdWorkspace = setupWorkspace("ndk_debug_paths", tmp_long_pwd);
    // BUG FIX: the original created this filesystem from `workspace.getDestPath()`
    // (the short-PWD workspace), so the gen path below was resolved against the
    // wrong root. It must be rooted at the long-PWD workspace being verified.
    ProjectFilesystem longPwdFilesystem =
        TestProjectFilesystems.createProjectFilesystem(longPwdWorkspace.getDestPath());
    longPwdWorkspace.writeContentsToPath(buckConfig, ".buckconfig");
    longPwdWorkspace.runBuckBuild(target.getFullyQualifiedName()).assertSuccess();
    lib =
        longPwdWorkspace.getPath(
            BuildTargetPaths.getGenPath(
                longPwdFilesystem, target, "%s/lib" + target.getShortName() + ".a"));
    String movedContents = MorePaths.asByteSource(lib).asCharSource(Charsets.ISO_8859_1).read();
    assertEquals(contents, movedContents);
  }
}
package dk.brics.tajs.analysis.nativeobjects; import java.util.ArrayList; import java.util.regex.Matcher; import java.util.regex.Pattern; import dk.brics.tajs.analysis.Conversion; import dk.brics.tajs.analysis.FunctionCalls.CallInfo; import dk.brics.tajs.analysis.NativeFunctions; import dk.brics.tajs.analysis.Solver; import dk.brics.tajs.analysis.State; import dk.brics.tajs.analysis.dom.ajax.ReadystateEvent; import dk.brics.tajs.analysis.dom.event.EventListener; import dk.brics.tajs.analysis.dom.event.KeyboardEvent; import dk.brics.tajs.analysis.dom.event.MouseEvent; import dk.brics.tajs.analysis.dom.event.UIEvent; import dk.brics.tajs.analysis.dom.event.WheelEvent; import dk.brics.tajs.dependency.Dependency; import dk.brics.tajs.dependency.DependencyAnalyzer; import dk.brics.tajs.dependency.DependencyID; import dk.brics.tajs.dependency.DependencyObject; import dk.brics.tajs.dependency.graph.DependencyGraphReference; import dk.brics.tajs.dependency.graph.DependencyNode; import dk.brics.tajs.dependency.graph.Label; import dk.brics.tajs.dependency.graph.nodes.DependencyExpressionNode; import dk.brics.tajs.dependency.graph.nodes.DependencyObjectNode; import dk.brics.tajs.dependency.interfaces.IDependency; import dk.brics.tajs.flowgraph.Node; import dk.brics.tajs.flowgraph.SourceLocation; import dk.brics.tajs.flowgraph.nodes.CallNode; import dk.brics.tajs.lattice.Value; import dk.brics.tajs.solver.Message.Severity; import dk.brics.tajs.solver.Message.Status; import dk.brics.tajs.util.Pair; import dk.brics.tajs.util.Triple; /** * 15.1 and B.2 native global functions. */ public class JSGlobal { private JSGlobal() { } /** * Evaluates the given native function. 
*/ public static Value evaluate(ECMAScriptObjects nativeobject, CallInfo<?> call, State state, Solver.SolverInterface c) { if (NativeFunctions.throwTypeErrorIfConstructor(call, state, c)) return Value.makeBottom(new Dependency(), new DependencyGraphReference()); switch (nativeobject) { // case EVAL: { // 15.1.2.1 // NativeFunctions.expectParameters(n, c, 0, 1); // TODO: 'eval' // } case PARSEINT: { // 15.1.2.2 // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 2); Value str = Conversion.toString(NativeFunctions.readParameter(call, 0), c); Value basis; if (call.isUnknownNumberOfArgs()) basis = NativeFunctions.readParameter(call, 1).joinNum(0); else basis = call.getNumberOfArgs() >= 2 ? 
Conversion.toNumber(NativeFunctions.readParameter(call, 1), c) : Value.makeNum(0, new Dependency(), node.getReference()); // ################################################## dependency.join(str.getDependency()); dependency.join(basis.getDependency()); // ################################################## // ================================================== node.addParent(str); node.addParent(basis); // ================================================== if (str.isMaybeSingleStr() && basis.isMaybeSingleNum()) { String s = str.getStr().trim(); double sign = 1; if (s.length() > 0 && s.charAt(0) == '-') sign = -1; if (s.length() > 0 && (s.charAt(0) == '-' || s.charAt(0) == '+')) s = s.substring(1); int radix = Conversion.toInt32(basis.getNum()); if (radix == 0) { radix = 10; if (s.length() > 1 && s.charAt(0) == '0') { radix = 8; if (s.length() > 2 && Character.toLowerCase(s.charAt(1)) == 'x') { radix = 16; s = s.substring(2); } } } if (radix < 2 || radix > 36) return Value.makeNum(Double.NaN, dependency, node.getReference()); else { int i; String z = s; for (i = 0; i < s.length(); i++) if (Character.digit(s.charAt(i), radix) < 0) { z = s.substring(0, i); break; } if (z.equals("")) return Value.makeNum(Double.NaN, dependency, node.getReference()); else return Value.makeNum(sign * Integer.parseInt(z, radix), dependency, node.getReference()); } } else return Value.makeAnyNum(dependency, node.getReference()); } case PARSEFLOAT: { // 15.1.2.3 // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 1); Value str = Conversion.toString(NativeFunctions.readParameter(call, 0), c); // 
################################################## dependency.join(str.getDependency()); // ################################################## if (str.isMaybeSingleStr()) { String s = str.getStr().trim(); Pattern p = Pattern.compile("[+-]?(Infinity|([0-9]+\\.[0-9]*|\\.[0-9]+|[0-9]+)([eE][+-]?[0-9]+)?)"); Matcher m = p.matcher(s); if (m.lookingAt()) return Value.makeNum(Double.parseDouble(m.group(0)), dependency, node.getReference()); else return Value.makeNum(Double.NaN, dependency, node.getReference()); } else return Value.makeAnyNum(dependency, node.getReference()); } case ISNAN: { // 15.1.2.4 // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 1); Value num = Conversion.toNumber(NativeFunctions.readParameter(call, 0), c); // ################################################## dependency.join(num.getDependency()); // ################################################## // ================================================== node.addParent(num); // ================================================== Value res = Value.makeBottom(dependency, node.getReference()); if (num.isMaybeNaN()) res = res.joinBool(true); if (num.isMaybeSingleNum() || num.isMaybeInf() || num.isMaybeNumUInt() || num.isMaybeNumNotUInt()) res = res.joinBool(false); return res; } case ISFINITE: { // 15.1.2.5 // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // 
================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 1); Value num = Conversion.toNumber(NativeFunctions.readParameter(call, 0), c); // ################################################## dependency.join(num.getDependency()); // ################################################## // ================================================== node.addParent(num); // ================================================== if (num.isMaybeSingleNum()) return Value.makeBool(!num.getNum().isInfinite(), dependency, node.getReference()); Value res = Value.makeBottom(dependency, node.getReference()); if (num.isMaybeNaN() || num.isMaybeInf()) res = res.joinBool(false); if (num.isMaybeNumUInt() || num.isMaybeNumNotUInt()) res = res.joinBool(true); return res; } case PRINT: // in Rhino, expects any number of parameters; returns // undefined case ALERT: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== return Value.makeUndef(dependency, node.getReference()); } case DECODEURI: // 15.1.3.1 case DECODEURICOMPONENT: // 15.1.3.2 case ENCODEURI: // 15.1.3.3 case ENCODEURICOMPONENT: // 15.1.3.4 case ESCAPE: // B.2.1 case UNESCAPE: { // B.2.2 // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 1); Value str = Conversion.toNumber(NativeFunctions.readParameter(call, 0), c); // 
################################################## dependency.join(str.getDependency()); // ################################################## // ================================================== node.addParent(str); // ================================================== return Value.makeAnyStr(dependency, node.getReference()); // TODO: // could // improve // precision for constant // strings } /* * ############################################################ * Dependency function, to mark values with source location * ############################################################ */ case TRACE: { NativeFunctions.expectParameters(nativeobject, call, c, 1, 2); Value value = NativeFunctions.readParameter(call, 0); SourceLocation sourceLocation = call.getSourceNode().getSourceLocation(); DependencyObject dependencyObject = DependencyObject.getDependencyObject(sourceLocation); Dependency dependency = new Dependency(dependencyObject); // ================================================== DependencyObjectNode node = new DependencyObjectNode(dependencyObject, c.getDependencyGraph().getRoot()); // TODO: is this correct ? 
dNode -> ROOT // node.addParent(value); value = value.setDependencyGraphReference(node.getReference()); // ================================================== // EXTENSION if(call.getNumberOfArgs()==2) { Value idValue = NativeFunctions.readParameter(call, 1); DependencyID id = new DependencyID(idValue.getStr()); DependencyID.cacheSet(id, dependencyObject); } value = value.joinDependency(dependency); return value; } case UNTRACE: { NativeFunctions.expectParameters(nativeobject, call, c, 2, 2); Value value = NativeFunctions.readParameter(call, 0); Value idValue = NativeFunctions.readParameter(call, 1); DependencyID id = new DependencyID(idValue.getStr()); if(DependencyID.cacheContains(id)) { DependencyObject dependencyToRemove = DependencyID.cacheGet(id); System.out.println("@ UNTRACE of " + id + " with " + dependencyToRemove + " in " + value); System.out.println("@ OLD VALUE " + value.getDependency()); Dependency newDependency = new Dependency(); for (DependencyObject dependencyObject : value.getDependency()) { if(!dependencyObject.equals(dependencyToRemove)) newDependency.join(dependencyObject); } value = value.setDependency(newDependency); System.out.println("@ NEW VALUE " + value.getDependency()); } // SourceLocation sourceLocation = call.getSourceNode().getSourceLocation(); // DependencyObject dependencyObject = DependencyObject.getDependencyObject(sourceLocation); // Dependency dependency = new Dependency(dependencyObject); // // // ================================================== // DependencyObjectNode node = new DependencyObjectNode(dependencyObject, c.getDependencyGraph().getRoot()); // // TODO: is this correct ? 
dNode -> ROOT // // node.addParent(value); // value = value.setDependencyGraphReference(node.getReference()); // // ================================================== // // value = value.joinDependency(dependency); return value; } /* * ############################################################ * Dependency function, to evaluate value dependency * ############################################################ */ case DUMPDEPENDENCY: { if (call.getNumberOfArgs() == 0) { // dump state dependency SourceLocation sourceLocation = call.getSourceNode().getSourceLocation(); Triple<String, IDependency<?>, SourceLocation> key = new Triple<String, IDependency<?>, SourceLocation>("", state, sourceLocation); if (!DependencyAnalyzer.dumps.containsKey(key)) { DependencyAnalyzer.dumps.put(key, new ArrayList<Pair<Dependency, DependencyGraphReference>>()); } DependencyAnalyzer.dumps.get(key).add( new Pair<Dependency, DependencyGraphReference>(state.getDependency(), state.getDependencyGraphReference())); } else { // dump value dependency CallNode n; // if(call instanceof CallNode) n = (CallNode) call.getSourceNode(); // else // return Value.makeUndef(new Dependency(), new // DependencyGraphReference()); // Node x = call.g (); // CallNode n = call.getSourceNode(); for (int i = 0; i < call.getNumberOfArgs(); i++) { Triple<String, IDependency<?>, SourceLocation> key = new Triple<String, IDependency<?>, SourceLocation>("v" + n.getArgVar(i), call.getArg(i), n.getSourceLocation()); if (!DependencyAnalyzer.dumps.containsKey(key)) { DependencyAnalyzer.dumps.put(key, new ArrayList<Pair<Dependency, DependencyGraphReference>>()); } DependencyAnalyzer.dumps.get(key).add( new Pair<Dependency, DependencyGraphReference>(call.getArg(i).getDependency(), call.getArg(i).getDependencyGraphReference())); } } return Value.makeUndef(new Dependency(), new DependencyGraphReference()); } case ASSERT: { // ################################################## Dependency dependency = new Dependency(); // 
################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 1); Value x = Conversion.toBoolean(NativeFunctions.readParameter(call, 0)); // ################################################## dependency.join(x.getDependency()); // ################################################## // ================================================== node.addParent(x); // ================================================== c.addMessage(x.isMaybeFalseButNotTrue() ? Status.CERTAIN : x.isMaybeFalse() ? Status.MAYBE : Status.NONE, Severity.HIGH, "Assertion fails"); return Value.makeUndef(dependency, node.getReference()); } case DUMPVALUE: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 1); Value x = NativeFunctions.readParameter(call, 0); // ################################################## dependency.join(x.getDependency()); // ################################################## // ================================================== node.addParent(x); // ================================================== c.addMessage(Status.INFO, Severity.HIGH, "Abstract value: " + x /* * + * " (context: " * + * c * . 
* getCurrentContext * ( * ) * + * ")" */); return Value.makeUndef(dependency, node.getReference()); } case DUMPPROTOTYPE: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 1); Value x = NativeFunctions.readParameter(call, 0); // ################################################## dependency.join(x.getDependency()); // ################################################## // ================================================== node.addParent(x); // ================================================== StringBuilder sb = new StringBuilder(); Value p = state.readInternalPrototype(x.getObjectLabels()); while (p.isMaybeObject()) { sb.append(p.toString()); p = state.readInternalPrototype(p.getObjectLabels()); if (!p.isNullOrUndef()) { sb.append(" -> "); } } c.addMessage(Status.INFO, Severity.HIGH, "Prototype: " + sb); return Value.makeUndef(dependency, node.getReference()); } case DUMPOBJECT: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 1); Value x = NativeFunctions.readParameter(call, 0); // ################################################## dependency.join(x.getDependency()); // ################################################## // ================================================== node.addParent(x); // ================================================== 
c.addMessage(Status.INFO, Severity.HIGH, "Abstract object: " + state.printObject(x) /* * + * " (context: " * + * c * . * getCurrentContext * ( * ) * + * ")" */); return Value.makeUndef(dependency, node.getReference()); } case DUMPSTATE: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 0, 0); c.addMessage(Status.INFO, Severity.HIGH, "Abstract state:\n" + state /* * + * " (context: " * + * c * . * getCurrentContext * ( * ) * + * ")" */); /* * try { FileWriter fw = new FileWriter("state.dot"); * fw.write(state.toDot(false)); fw.close(); } catch (IOException e) * { throw new RuntimeException(e); } */ return Value.makeUndef(dependency, node.getReference()); } case DUMPMODIFIEDSTATE: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 0, 0); c.addMessage(Status.INFO, Severity.HIGH, "Abstract state (modified parts):" /* * + * " (context: " * + * c * . 
* getCurrentContext * ( * ) * + * ")" */ + state.toStringModified()); return Value.makeUndef(dependency, node.getReference()); } case DUMPATTRIBUTES: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 2, 2); Value x = NativeFunctions.readParameter(call, 0); Value p = Conversion.toString(NativeFunctions.readParameter(call, 1), c); // ################################################## dependency.join(x.getDependency()); dependency.join(p.getDependency()); // ################################################## // ================================================== node.addParent(p); node.addParent(x); // ================================================== if (!p.isMaybeSingleStr()) c.addMessage(Status.INFO, Severity.HIGH, "Calling dumpAttributes with non-constant property name"); else { String propertyname = p.getStr(); Value v = state.readPropertyDirect(x.getObjectLabels(), propertyname); c.addMessage(Status.INFO, Severity.HIGH, "Property attributes: " + v.printAttributes() /* * + * " (context: " * + * c * . * getCurrentContext * ( * ) * + * ")" */); } return Value.makeUndef(dependency, node.getReference()); } case DUMPOBJECTORIGIN: { // TODO: remove dumpObjectOrigin? 
(use // dumpObject instead) // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 1); Value x = NativeFunctions.readParameter(call, 0); // ################################################## dependency.join(x.getDependency()); // ################################################## // ================================================== node.addParent(x); // ================================================== c.addMessage(Status.INFO, Severity.HIGH, "Origin of objects: " + state.printObjectOrigins(x) /* * ")" */); return Value.makeUndef(dependency, node.getReference()); } case CONVERSION_TO_PRIMITIVE: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 2); Value varg = NativeFunctions.readParameter(call, 0); // ################################################## dependency.join(varg.getDependency()); // ################################################## // ================================================== node.addParent(varg); // ================================================== Value vhint; if (call.isUnknownNumberOfArgs()) vhint = NativeFunctions.readParameter(call, 1).joinStr("NONE"); else vhint = call.getNumberOfArgs() >= 2 ? 
NativeFunctions.readParameter(call, 1) : Value.makeStr("NONE", dependency, node.getReference()); if (!vhint.isMaybeSingleStr()) { c.addMessage(Status.INFO, Severity.HIGH, "Calling conversionToPrimitive with non-constant hint string"); return Value.makeUndef(dependency, node.getReference()); } else { String shint = vhint.getStr(); return Conversion.toPrimitive(varg, shint.equals("NONE") ? Conversion.Hint.NONE : shint.equals("NUM") ? Conversion.Hint.NUM : Conversion.Hint.STR, c); } } case ASSUME_NON_NULLUNDEF: { // TODO: remove assumeNonNullUndef? (see // AssumeNode) // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== NativeFunctions.expectParameters(nativeobject, call, c, 1, 2); if (call.isUnknownNumberOfArgs()) { c.addMessage(Status.INFO, Severity.HIGH, "Calling assumeNonNullUndef with unknown number of arguments"); } else { if (call.getNumberOfArgs() == 1) { Value varg = NativeFunctions.readParameter(call, 0); // ################################################## dependency.join(varg.getDependency()); // ################################################## // ================================================== node.addParent(varg); // ================================================== if (!varg.isMaybeSingleStr()) { c.addMessage(Status.INFO, Severity.HIGH, "Calling assumeNonNullUndef with non-constant variable string"); } else { String varname = varg.getStr(); Value v = state.readVariable(varname); // ################################################## dependency.join(v.getDependency()); // ################################################## // ================================================== node.addParent(v); // ================================================== v = 
v.restrictToNotNullNotUndef().clearAbsent(); if (v.isNoValue()) return Value.makeBottom(dependency, node.getReference()); state.writeVariable(varname, v); } } else if (call.getNumberOfArgs() == 2) { throw new RuntimeException("2 arg variant of assumeNonNullUndef not yet implemented");// TODO: // assumeNonNullUndef } } return Value.makeUndef(dependency, node.getReference()); } case TAJS_GET_UI_EVENT: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== return Value.makeObject(UIEvent.INSTANCES, dependency, node.getReference()); } // case TAJS_GET_DOCUMENT_EVENT: { // // ################################################## // Dependency dependency = new Dependency(); // // ################################################## // // // ================================================== // DependencyExpressionNode node = DependencyNode.link(Label.CALL, // call.getSourceNode(), state); // // ================================================== // // return Value.makeObject(DocumentEvent.DOCUMENT_EVENT, dependency, // node.getReference()); // } case TAJS_GET_MOUSE_EVENT: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== return Value.makeObject(MouseEvent.INSTANCES, dependency, node.getReference()); } case TAJS_GET_AJAX_EVENT: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // 
================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== return Value.makeObject(ReadystateEvent.INSTANCES, dependency, node.getReference()); } case TAJS_GET_KEYBOARD_EVENT: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== return Value.makeObject(KeyboardEvent.INSTANCES, dependency, node.getReference()); } case TAJS_GET_EVENT_LISTENER: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== return Value.makeObject(EventListener.INSTANCES, dependency, node.getReference()); } case TAJS_GET_WHEEL_EVENT: { // ################################################## Dependency dependency = new Dependency(); // ################################################## // ================================================== DependencyExpressionNode node = DependencyNode.link(Label.CALL, call.getSourceNode(), state); // ================================================== return Value.makeObject(WheelEvent.INSTANCES, dependency, node.getReference()); } default: return null; } } }
/** * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.integration.tool.viewdefinitioneditor; import java.awt.BorderLayout; import java.awt.Dimension; import java.awt.EventQueue; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import java.awt.event.WindowEvent; import java.awt.event.WindowStateListener; import java.util.concurrent.SynchronousQueue; import javax.swing.JFrame; import javax.swing.JList; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.JTextField; import javax.swing.ListCellRenderer; import javax.swing.ListSelectionModel; import javax.swing.SwingUtilities; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.opengamma.component.tool.AbstractTool; import com.opengamma.core.config.ConfigSource; import com.opengamma.core.config.impl.ConfigItem; import com.opengamma.engine.view.ViewDefinition; import com.opengamma.financial.tool.ToolContext; import com.opengamma.integration.swing.ViewEntry; import com.opengamma.integration.swing.ViewListCellRenderer; import com.opengamma.integration.swing.ViewListModel; import com.opengamma.scripts.Scriptable; /** * Debugging tool for engine functions. */ @Scriptable public class ViewDefinitionEditor extends AbstractTool<ToolContext> { /** Logger. */ private static final Logger LOGGER = LoggerFactory.getLogger(ViewDefinitionEditor.class); private JFrame _frame; private JTextField _viewNameTextField; private ViewListModel _viewListModel; //------------------------------------------------------------------------- /** * Main method to run the tool. 
* * @param args the standard tool arguments, not null */ public static void main(final String[] args) { new ViewDefinitionEditor().invokeAndTerminate(args); } //------------------------------------------------------------------------- /** * Initialize the contents of the frame. * @wbp.parser.entryPoint */ private void initialize() { _frame = new JFrame(); _frame.setTitle("View Definition Editor"); _frame.setPreferredSize(new Dimension(1000, 700)); _frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); final JPanel mainPanel = new JPanel(); _frame.getContentPane().add(mainPanel, BorderLayout.CENTER); mainPanel.setLayout(new BorderLayout()); _viewList = new JList<ViewEntry>(); _viewListModel = getViewListModel(); _viewList.setModel(_viewListModel); _viewList.setCellRenderer(getViewListCellRenderer()); _viewList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); final ConfigSource configSource = getToolContext().getConfigSource(); final JPanel panel = new JPanel(); mainPanel.add(panel, BorderLayout.CENTER); panel.setLayout(new BorderLayout(0, 0)); _splitPane = new JSplitPane(); panel.add(_splitPane); //JScrollPane failuresScrollPane = new JScrollPane(_failuresTreeTable); //_splitPane.setRightComponent(failuresScrollPane); _viewNameTextField = new JTextField(); _viewNameTextField.setHorizontalAlignment(JTextField.LEFT); _viewNameTextField.addKeyListener(new KeyListener() { private void actionPerformed(final KeyEvent e) { final JTextField field = _viewNameTextField; _viewListModel.setFilter(field.getText());; } @Override public void keyTyped(final KeyEvent e) { LOGGER.warn("key code = {}", e.getKeyCode()); actionPerformed(e); } @Override public void keyPressed(final KeyEvent e) { LOGGER.warn("key pressed = {}", e.getKeyCode()); if (e.getKeyCode() == KeyEvent.VK_DOWN) { _viewList.requestFocusInWindow(); } } @Override public void keyReleased(final KeyEvent e) { } }); _viewList.addListSelectionListener(new ListSelectionListener() { @Override public void 
valueChanged(final ListSelectionEvent e) { @SuppressWarnings("unchecked") final JList<ViewEntry> cb = (JList<ViewEntry>) e.getSource(); final ViewEntry viewEntry = cb.getSelectedValue(); if (viewEntry != null) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { @SuppressWarnings("unchecked") final ConfigItem<ViewDefinition> configItem = (ConfigItem<ViewDefinition>) configSource.get(viewEntry.getUniqueId()); if (configItem.getValue() != null) { _viewNameTextField.setText(viewEntry.getName()); //_portfolioTree.setModel(getPortfolioTreeModel(configItem.getValue().getPortfolioId(), getToolContext())); } else { JOptionPane.showMessageDialog(null, "There is no portfolio set in the selected view", "No portfolio", JOptionPane.ERROR_MESSAGE); } } }); } } }); final JPanel viewSelectionPanel = new JPanel(new BorderLayout()); final JScrollPane scrollPane = new JScrollPane(_viewList); viewSelectionPanel.add(_viewNameTextField, BorderLayout.PAGE_START); viewSelectionPanel.add(scrollPane, BorderLayout.CENTER); _splitPane.setLeftComponent(viewSelectionPanel); } private ListCellRenderer<? super ViewEntry> getViewListCellRenderer() { return new ViewListCellRenderer(); } private ViewListModel getViewListModel() { return new ViewListModel(getToolContext().getConfigMaster()); } private final SynchronousQueue<Void> _endQueue = new SynchronousQueue<>(); private JSplitPane _splitPane; private JList<ViewEntry> _viewList; @Override protected void doRun() throws Exception { initialize(); EventQueue.invokeLater(new Runnable() { @Override public void run() { try { _frame.pack(); _frame.setVisible(true); _frame.addWindowStateListener(new WindowStateListener() { @Override public void windowStateChanged(final WindowEvent e) { if (e.getNewState() == WindowEvent.WINDOW_CLOSED) { _endQueue.add(null); } } }); _splitPane.setDividerLocation(0.3d); } catch (final Exception e) { e.printStackTrace(); } } }); _endQueue.take(); } }
package com.xaosia.bungeepex.platform.bukkit.utils; import java.util.UUID; import com.xaosia.bungeepex.platform.bukkit.BukkitConfig; import com.xaosia.bungeepex.platform.bukkit.BukkitPlugin; import lombok.RequiredArgsConstructor; import com.xaosia.bungeepex.BungeePEX; import com.xaosia.bungeepex.PermissionGroup; import com.xaosia.bungeepex.PermissionUser; import com.xaosia.bungeepex.platform.NetworkNotifier; import org.bukkit.Bukkit; import org.bukkit.entity.Player; @RequiredArgsConstructor public class BukkitNotifier implements NetworkNotifier { private final BukkitConfig config; @Override public void deleteUser(PermissionUser u, String origin) { //if standalone don't notify bungee if (config.isStandalone()) { return; } if (config.isUseUUIDs()) { sendPM(u.getUUID(), "deleteUser;" + u.getUUID(), origin); } else { sendPM(u.getName(), "deleteUser;" + u.getName(), origin); } } @Override public void deleteGroup(PermissionGroup g, String origin) { //if standalone don't notify bungee if (config.isStandalone()) { return; } sendPMAll("deleteGroup;" + g.getName(), origin); } @Override public void reloadUser(PermissionUser u, String origin) { //if standalone don't notify bungee if (config.isStandalone()) { return; } if (config.isUseUUIDs()) { sendPM(u.getUUID(), "reloadUser;" + u.getUUID(), origin); } else { sendPM(u.getName(), "reloadUser;" + u.getName(), origin); } } @Override public void reloadGroup(PermissionGroup g, String origin) { //if standalone don't notify bungee if (config.isStandalone()) { return; } sendPMAll("reloadGroup;" + g.getName(), origin); } @Override public void reloadUsers(String origin) { //if standalone don't notify bungee if (config.isStandalone()) { return; } sendPMAll("reloadUsers;", origin); } @Override public void reloadGroups(String origin) { //if standalone don't notify bungee if (config.isStandalone()) { return; } sendPMAll("reloadGroups", origin); } @Override public void reloadAll(String origin) { //if standalone don't notify bungee if 
(config.isStandalone()) { return; } sendPMAll("reloadall", origin); } //bukkit-bungeeperms reload information functions private void sendPM(String player, String msg, String origin) { //if standalone don't notify bungee if (config.isStandalone()) { return; } Player p = Bukkit.getPlayer(player); if (p != null) { p.sendPluginMessage(BukkitPlugin.getInstance(), BungeePEX.CHANNEL, msg.getBytes()); //send config for match checking sendConfig(p); } else { sendPMAll(msg, origin); } } private void sendPM(UUID player, String msg, String origin) { //if standalone don't notify bungee if (config.isStandalone()) { return; } Player p = Bukkit.getPlayer(player); if (p != null) { p.sendPluginMessage(BukkitPlugin.getInstance(), BungeePEX.CHANNEL, msg.getBytes()); //send config for match checking sendConfig(p); } else { sendPMAll(msg, origin); } } private void sendPMAll(String msg, String origin) { //if standalone don't notify bungee if (config.isStandalone()) { return; } Player p = Bukkit.getOnlinePlayers().iterator().hasNext() ? Bukkit.getOnlinePlayers().iterator().next() : null; if (p != null) { p.sendPluginMessage(BukkitPlugin.getInstance(), BungeePEX.CHANNEL, msg.getBytes());//todo use utf8 encoding //send config for match checking sendConfig(p); } } public void sendWorldUpdate(Player p) { //if standalone don't notify bungee if (config.isStandalone()) { return; } String world = p.getWorld() == null ? 
"" : p.getWorld().getName(); p.sendPluginMessage(BukkitPlugin.getInstance(), BungeePEX.CHANNEL, ("playerworldupdate;" + p.getName() + ";" + world).getBytes()); //send config for match checking sendConfig(p); } private long lastConfigUpdate = 0; private void sendConfig(Player p) { synchronized (this) { long now = System.currentTimeMillis(); if (lastConfigUpdate + 5 * 60 * 1000 < now) { lastConfigUpdate = now; p.sendPluginMessage(BukkitPlugin.getInstance(), BungeePEX.CHANNEL, ("configcheck;" + config.getServername() + ";" + config.getBackEndType() + ";" + config.getBackEndType() + ";" + config.isUseUUIDs()).getBytes()); } } } }
/*
 * Copyright (C) 2012-2015 DataStax Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datastax.driver.core;

import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;

import com.google.common.collect.ImmutableMap;

import io.netty.buffer.ByteBuf;

/**
 * Client-side request messages of the Cassandra native protocol, each with a
 * {@link Message.Coder} that writes the message body to a {@link ByteBuf} and
 * pre-computes its encoded size. Pure utility holder; not instantiable.
 */
class Requests {

    private Requests() {}

    /** STARTUP message: announces the CQL version and optional compression. */
    public static class Startup extends Message.Request {
        private static final String CQL_VERSION_OPTION = "CQL_VERSION";
        private static final String CQL_VERSION = "3.0.0";

        public static final String COMPRESSION_OPTION = "COMPRESSION";

        public static final Message.Coder<Startup> coder = new Message.Coder<Startup>() {
            public void encode(Startup msg, ByteBuf dest, ProtocolVersion version) {
                CBUtil.writeStringMap(msg.options, dest);
            }

            public int encodedSize(Startup msg, ProtocolVersion version) {
                return CBUtil.sizeOfStringMap(msg.options);
            }
        };

        private final Map<String, String> options;

        public Startup(ProtocolOptions.Compression compression) {
            super(Message.Request.Type.STARTUP);

            // COMPRESSION is only advertised when a compressor is configured.
            ImmutableMap.Builder<String, String> map = new ImmutableMap.Builder<String, String>();
            map.put(CQL_VERSION_OPTION, CQL_VERSION);
            if (compression != ProtocolOptions.Compression.NONE)
                map.put(COMPRESSION_OPTION, compression.toString());
            this.options = map.build();
        }

        @Override
        public String toString() {
            return "STARTUP " + options;
        }
    }

    // Only for protocol v1
    /** CREDENTIALS message (protocol v1 only); later versions use SASL AUTH_RESPONSE. */
    public static class Credentials extends Message.Request {

        public static final Message.Coder<Credentials> coder = new Message.Coder<Credentials>() {

            public void encode(Credentials msg, ByteBuf dest, ProtocolVersion version) {
                assert version == ProtocolVersion.V1;
                CBUtil.writeStringMap(msg.credentials, dest);
            }

            public int encodedSize(Credentials msg, ProtocolVersion version) {
                assert version == ProtocolVersion.V1;
                return CBUtil.sizeOfStringMap(msg.credentials);
            }
        };

        private final Map<String, String> credentials;

        public Credentials(Map<String, String> credentials) {
            super(Message.Request.Type.CREDENTIALS);
            this.credentials = credentials;
        }
    }

    /** OPTIONS message: empty body, queries the server's supported options. */
    public static class Options extends Message.Request {

        public static final Message.Coder<Options> coder = new Message.Coder<Options>() {
            public void encode(Options msg, ByteBuf dest, ProtocolVersion version) {}

            public int encodedSize(Options msg, ProtocolVersion version) {
                return 0;
            }
        };

        public Options() {
            super(Message.Request.Type.OPTIONS);
        }

        @Override
        public String toString() {
            return "OPTIONS";
        }
    }

    /** QUERY message: a CQL query string plus its {@link QueryProtocolOptions}. */
    public static class Query extends Message.Request {

        public static final Message.Coder<Query> coder = new Message.Coder<Query>() {
            public void encode(Query msg, ByteBuf dest, ProtocolVersion version) {
                CBUtil.writeLongString(msg.query, dest);
                msg.options.encode(dest, version);
            }

            public int encodedSize(Query msg, ProtocolVersion version) {
                return CBUtil.sizeOfLongString(msg.query)
                     + msg.options.encodedSize(version);
            }
        };

        public final String query;
        public final QueryProtocolOptions options;

        public Query(String query) {
            this(query, QueryProtocolOptions.DEFAULT);
        }

        public Query(String query, QueryProtocolOptions options) {
            super(Type.QUERY);
            this.query = query;
            this.options = options;
        }

        @Override
        public String toString() {
            return "QUERY " + query + '(' + options + ')';
        }
    }

    /** EXECUTE message: runs a previously prepared statement by its MD5 id. */
    public static class Execute extends Message.Request {

        public static final Message.Coder<Execute> coder = new Message.Coder<Execute>() {
            public void encode(Execute msg, ByteBuf dest, ProtocolVersion version) {
                CBUtil.writeBytes(msg.statementId.bytes, dest);
                msg.options.encode(dest, version);
            }

            public int encodedSize(Execute msg, ProtocolVersion version) {
                return CBUtil.sizeOfBytes(msg.statementId.bytes)
                     + msg.options.encodedSize(version);
            }
        };

        public final MD5Digest statementId;
        public final QueryProtocolOptions options;

        public Execute(MD5Digest statementId, QueryProtocolOptions options) {
            super(Message.Request.Type.EXECUTE);
            this.statementId = statementId;
            this.options = options;
        }

        @Override
        public String toString() {
            return "EXECUTE " + statementId + " (" + options + ')';
        }
    }

    /**
     * Per-query option flags. The wire encoding uses each constant's ordinal
     * as its bit position, so the declaration order below IS the protocol:
     * do not reorder or insert constants.
     */
    static enum QueryFlag {
        // The order of that enum matters!! (ordinal == bit position on the wire)
        VALUES,
        SKIP_METADATA,
        PAGE_SIZE,
        PAGING_STATE,
        SERIAL_CONSISTENCY,
        DEFAULT_TIMESTAMP,
        VALUE_NAMES;

        /** Decodes a flags byte into the set of flags whose bits are set. */
        public static EnumSet<QueryFlag> deserialize(int flags) {
            EnumSet<QueryFlag> set = EnumSet.noneOf(QueryFlag.class);
            QueryFlag[] values = QueryFlag.values();
            for (int n = 0; n < values.length; n++) {
                if ((flags & (1 << n)) != 0)
                    set.add(values[n]);
            }
            return set;
        }

        /** Encodes a set of flags into a bitmask (bit = ordinal). */
        public static int serialize(EnumSet<QueryFlag> flags) {
            int i = 0;
            for (QueryFlag flag : flags)
                i |= 1 << flag.ordinal();
            return i;
        }
    }

    /**
     * Options shared by QUERY and EXECUTE: consistency, bound values, paging
     * and timestamp settings. The flag set is derived once in the constructor
     * and drives both {@link #encode} and {@link #encodedSize}, which must
     * stay in lock-step.
     */
    public static class QueryProtocolOptions {

        // Sentinels for "not set": pageSize -1, pagingState null,
        // defaultTimestamp Long.MIN_VALUE.
        public static final QueryProtocolOptions DEFAULT = new QueryProtocolOptions(ConsistencyLevel.ONE,
                                                                                    Collections.<ByteBuffer>emptyList(),
                                                                                    false,
                                                                                    -1,
                                                                                    null,
                                                                                    ConsistencyLevel.SERIAL,
                                                                                    Long.MIN_VALUE);

        private final EnumSet<QueryFlag> flags = EnumSet.noneOf(QueryFlag.class);
        public final ConsistencyLevel consistency;
        public final List<ByteBuffer> values;
        public final boolean skipMetadata;
        public final int pageSize;
        public final ByteBuffer pagingState;
        public final ConsistencyLevel serialConsistency;
        public final long defaultTimestamp;

        public QueryProtocolOptions(ConsistencyLevel consistency,
                                    List<ByteBuffer> values,
                                    boolean skipMetadata,
                                    int pageSize,
                                    ByteBuffer pagingState,
                                    ConsistencyLevel serialConsistency,
                                    long defaultTimestamp) {

            this.consistency = consistency;
            this.values = values;
            this.skipMetadata = skipMetadata;
            this.pageSize = pageSize;
            this.pagingState = pagingState;
            this.serialConsistency = serialConsistency;
            this.defaultTimestamp = defaultTimestamp;

            // Populate flags
            if (!values.isEmpty())
                flags.add(QueryFlag.VALUES);
            if (skipMetadata)
                flags.add(QueryFlag.SKIP_METADATA);
            if (pageSize >= 0)
                flags.add(QueryFlag.PAGE_SIZE);
            if (pagingState != null)
                flags.add(QueryFlag.PAGING_STATE);
            if (serialConsistency != ConsistencyLevel.SERIAL)
                flags.add(QueryFlag.SERIAL_CONSISTENCY);
            if (defaultTimestamp != Long.MIN_VALUE)
                flags.add(QueryFlag.DEFAULT_TIMESTAMP);
        }

        public void encode(ByteBuf dest, ProtocolVersion version) {
            switch (version) {
                case V1:
                    // v1 has no flags byte: only (optional) values + consistency.
                    if (flags.contains(QueryFlag.VALUES))
                        CBUtil.writeValueList(values, dest);
                    CBUtil.writeConsistencyLevel(consistency, dest);
                    break;
                case V2:
                case V3:
                    // v2+: consistency, flags byte, then one field per set flag,
                    // in flag-ordinal order.
                    CBUtil.writeConsistencyLevel(consistency, dest);
                    dest.writeByte((byte)QueryFlag.serialize(flags));
                    if (flags.contains(QueryFlag.VALUES))
                        CBUtil.writeValueList(values, dest);
                    if (flags.contains(QueryFlag.PAGE_SIZE))
                        dest.writeInt(pageSize);
                    if (flags.contains(QueryFlag.PAGING_STATE))
                        CBUtil.writeValue(pagingState, dest);
                    if (flags.contains(QueryFlag.SERIAL_CONSISTENCY))
                        CBUtil.writeConsistencyLevel(serialConsistency, dest);
                    // DEFAULT_TIMESTAMP only exists on the wire from v3 on.
                    if (version == ProtocolVersion.V3 && flags.contains(QueryFlag.DEFAULT_TIMESTAMP))
                        dest.writeLong(defaultTimestamp);
                    break;
                default:
                    throw version.unsupported();
            }
        }

        public int encodedSize(ProtocolVersion version) {
            switch (version) {
                case V1:
                    // NOTE(review): this counts the value list unconditionally
                    // while encode() writes it only when the VALUES flag is set;
                    // presumably harmless because an empty list never sets the
                    // flag here — confirm before relying on it for other inputs.
                    return CBUtil.sizeOfValueList(values)
                         + CBUtil.sizeOfConsistencyLevel(consistency);
                case V2:
                case V3:
                    int size = 0;
                    size += CBUtil.sizeOfConsistencyLevel(consistency);
                    size += 1; // flags
                    if (flags.contains(QueryFlag.VALUES))
                        size += CBUtil.sizeOfValueList(values);
                    if (flags.contains(QueryFlag.PAGE_SIZE))
                        size += 4;
                    if (flags.contains(QueryFlag.PAGING_STATE))
                        size += CBUtil.sizeOfValue(pagingState);
                    if (flags.contains(QueryFlag.SERIAL_CONSISTENCY))
                        size += CBUtil.sizeOfConsistencyLevel(serialConsistency);
                    if (version == ProtocolVersion.V3 && flags.contains(QueryFlag.DEFAULT_TIMESTAMP))
                        size += 8;
                    return size;
                default:
                    throw version.unsupported();
            }
        }

        @Override
        public String toString() {
            // NOTE(review): defaultTimestamp is not included here although
            // BatchProtocolOptions.toString does include it — looks like an
            // oversight; confirm before changing the format.
            return String.format("[cl=%s, vals=%s, skip=%b, psize=%d, state=%s, serialCl=%s]",
                                 consistency, values, skipMetadata, pageSize, pagingState, serialConsistency);
        }
    }

    /**
     * BATCH message (v2+): a list of queries, each either a raw CQL string or
     * a prepared-statement id, with one value list per query.
     */
    public static class Batch extends Message.Request {

        public static final Message.Coder<Batch> coder = new Message.Coder<Batch>() {
            public void encode(Batch msg, ByteBuf dest, ProtocolVersion version) {
                int queries = msg.queryOrIdList.size();
                assert queries <= 0xFFFF; // query count is written as an unsigned short

                dest.writeByte(fromType(msg.type));
                dest.writeShort(queries);

                for (int i = 0; i < queries; i++) {
                    Object q = msg.queryOrIdList.get(i);
                    // Per-query kind byte: 0 = query string, 1 = prepared id.
                    dest.writeByte((byte)(q instanceof String ? 0 : 1));
                    if (q instanceof String)
                        CBUtil.writeLongString((String)q, dest);
                    else
                        CBUtil.writeBytes(((MD5Digest)q).bytes, dest);

                    CBUtil.writeValueList(msg.values.get(i), dest);
                }

                msg.options.encode(dest, version);
            }

            public int encodedSize(Batch msg, ProtocolVersion version) {
                int size = 3; // type + nb queries
                for (int i = 0; i < msg.queryOrIdList.size(); i++) {
                    Object q = msg.queryOrIdList.get(i);
                    size += 1 // kind byte
                          + (q instanceof String
                             ? CBUtil.sizeOfLongString((String)q)
                             : CBUtil.sizeOfBytes(((MD5Digest)q).bytes));

                    size += CBUtil.sizeOfValueList(msg.values.get(i));
                }
                size += msg.options.encodedSize(version);
                return size;
            }

            /** Maps the batch type enum to its protocol byte. */
            private byte fromType(BatchStatement.Type type) {
                switch (type) {
                    case LOGGED:   return 0;
                    case UNLOGGED: return 1;
                    case COUNTER:  return 2;
                    default:       throw new AssertionError();
                }
            }
        };

        public final BatchStatement.Type type;
        // Each element is either a String (raw CQL) or an MD5Digest (prepared id);
        // values.get(i) holds the bound values for queryOrIdList.get(i).
        public final List<Object> queryOrIdList;
        public final List<List<ByteBuffer>> values;
        public final BatchProtocolOptions options;

        public Batch(BatchStatement.Type type, List<Object> queryOrIdList, List<List<ByteBuffer>> values, BatchProtocolOptions options) {
            super(Message.Request.Type.BATCH);
            this.type = type;
            this.queryOrIdList = queryOrIdList;
            this.values = values;
            this.options = options;
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            sb.append("BATCH of [");
            for (int i = 0; i < queryOrIdList.size(); i++) {
                if (i > 0) sb.append(", ");
                sb.append(queryOrIdList.get(i)).append(" with ").append(values.get(i).size()).append(" values");
            }
            sb.append("] with options ").append(options);
            return sb.toString();
        }
    }

    /**
     * Options for BATCH messages: consistency plus (v3+) serial consistency
     * and default timestamp, mirroring the relevant subset of
     * {@link QueryProtocolOptions}.
     */
    public static class BatchProtocolOptions {
        private final EnumSet<QueryFlag> flags = EnumSet.noneOf(QueryFlag.class);
        public final ConsistencyLevel consistency;
        public final ConsistencyLevel serialConsistency;
        public final long defaultTimestamp;

        public BatchProtocolOptions(ConsistencyLevel consistency, ConsistencyLevel serialConsistency, long defaultTimestamp) {
            this.consistency = consistency;
            this.serialConsistency = serialConsistency;
            this.defaultTimestamp = defaultTimestamp;

            // Flags derived from the non-default settings, as for queries.
            if (serialConsistency != ConsistencyLevel.SERIAL)
                flags.add(QueryFlag.SERIAL_CONSISTENCY);
            if (defaultTimestamp != Long.MIN_VALUE)
                flags.add(QueryFlag.DEFAULT_TIMESTAMP);
        }

        public void encode(ByteBuf dest, ProtocolVersion version) {
            switch (version) {
                case V2:
                    // v2 batches carry only the consistency level.
                    CBUtil.writeConsistencyLevel(consistency, dest);
                    break;
                case V3:
                    CBUtil.writeConsistencyLevel(consistency, dest);
                    dest.writeByte((byte)QueryFlag.serialize(flags));
                    if (flags.contains(QueryFlag.SERIAL_CONSISTENCY))
                        CBUtil.writeConsistencyLevel(serialConsistency, dest);
                    if (flags.contains(QueryFlag.DEFAULT_TIMESTAMP))
                        dest.writeLong(defaultTimestamp);
                    break;
                default:
                    throw version.unsupported();
            }
        }

        public int encodedSize(ProtocolVersion version) {
            switch (version) {
                case V2:
                    return CBUtil.sizeOfConsistencyLevel(consistency);
                case V3:
                    int size = 0;
                    size += CBUtil.sizeOfConsistencyLevel(consistency);
                    size += 1; // flags
                    if (flags.contains(QueryFlag.SERIAL_CONSISTENCY))
                        size += CBUtil.sizeOfConsistencyLevel(serialConsistency);
                    if (flags.contains(QueryFlag.DEFAULT_TIMESTAMP))
                        size += 8;
                    return size;
                default:
                    throw version.unsupported();
            }
        }

        @Override
        public String toString() {
            return String.format("[cl=%s, serialCl=%s, defaultTs=%d]",
                                 consistency, serialConsistency, defaultTimestamp);
        }
    }

    /** PREPARE message: asks the server to prepare a CQL query string. */
    public static class Prepare extends Message.Request {

        public static final Message.Coder<Prepare> coder = new Message.Coder<Prepare>() {

            public void encode(Prepare msg, ByteBuf dest, ProtocolVersion version) {
                CBUtil.writeLongString(msg.query, dest);
            }

            public int encodedSize(Prepare msg, ProtocolVersion version) {
                return CBUtil.sizeOfLongString(msg.query);
            }
        };

        private final String query;

        public Prepare(String query) {
            super(Message.Request.Type.PREPARE);
            this.query = query;
        }

        @Override
        public String toString() {
            return "PREPARE " + query;
        }
    }

    /** REGISTER message: subscribes this connection to server push events. */
    public static class Register extends Message.Request {

        public static final Message.Coder<Register> coder = new Message.Coder<Register>() {
            public void encode(Register msg, ByteBuf dest, ProtocolVersion version) {
                dest.writeShort(msg.eventTypes.size());
                for (ProtocolEvent.Type type : msg.eventTypes)
                    CBUtil.writeEnumValue(type, dest);
            }

            public int encodedSize(Register msg, ProtocolVersion version) {
                int size = 2; // event count (unsigned short)
                for (ProtocolEvent.Type type : msg.eventTypes)
                    size += CBUtil.sizeOfEnumValue(type);
                return size;
            }
        };

        private final List<ProtocolEvent.Type> eventTypes;

        public Register(List<ProtocolEvent.Type> eventTypes) {
            super(Message.Request.Type.REGISTER);
            this.eventTypes = eventTypes;
        }

        @Override
        public String toString() {
            return "REGISTER " + eventTypes;
        }
    }

    /** AUTH_RESPONSE message (v2+): one step of the SASL challenge/response. */
    public static class AuthResponse extends Message.Request {

        public static final Message.Coder<AuthResponse> coder = new Message.Coder<AuthResponse>() {

            public void encode(AuthResponse response, ByteBuf dest, ProtocolVersion version) {
                CBUtil.writeValue(response.token, dest);
            }

            public int encodedSize(AuthResponse response, ProtocolVersion version) {
                return CBUtil.sizeOfValue(response.token);
            }
        };

        private final byte[] token;

        public AuthResponse(byte[] token) {
            super(Message.Request.Type.AUTH_RESPONSE);
            this.token = token;
        }
    }
}
/* * Copyright 2014 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.channel.epoll; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.CompositeByteBuf; import io.netty.channel.AddressedEnvelope; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelMetadata; import io.netty.channel.ChannelOption; import io.netty.channel.ChannelOutboundBuffer; import io.netty.channel.ChannelPipeline; import io.netty.channel.ChannelPromise; import io.netty.channel.DefaultAddressedEnvelope; import io.netty.channel.socket.DatagramChannel; import io.netty.channel.socket.DatagramChannelConfig; import io.netty.channel.socket.DatagramPacket; import io.netty.channel.unix.DatagramSocketAddress; import io.netty.channel.unix.FileDescriptor; import io.netty.channel.unix.Socket; import io.netty.util.internal.PlatformDependent; import io.netty.util.internal.StringUtil; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.NetworkInterface; import java.net.SocketAddress; import java.net.SocketException; import java.nio.ByteBuffer; import java.nio.channels.NotYetConnectedException; import java.util.ArrayList; import java.util.List; import static io.netty.channel.unix.Socket.newSocketDgram; /** * {@link DatagramChannel} implementation that uses linux EPOLL Edge-Triggered Mode for * maximal performance. 
 */
public final class EpollDatagramChannel extends AbstractEpollChannel implements DatagramChannel {
    // hasDisconnect = true: a connected datagram channel can be disconnected again.
    private static final ChannelMetadata METADATA = new ChannelMetadata(true);
    // Appended to the UnsupportedOperationException thrown by filterOutboundMessage(...)
    // so users see exactly which message types this channel accepts.
    private static final String EXPECTED_TYPES =
            " (expected: " + StringUtil.simpleClassName(DatagramPacket.class) + ", " +
            StringUtil.simpleClassName(AddressedEnvelope.class) + '<' +
            StringUtil.simpleClassName(ByteBuf.class) + ", " +
            StringUtil.simpleClassName(InetSocketAddress.class) + ">, " +
            StringUtil.simpleClassName(ByteBuf.class) + ')';

    // Cached local / remote addresses; volatile because they are written from the
    // event loop (doBind / connect) but may be read from any thread.
    private volatile InetSocketAddress local;
    private volatile InetSocketAddress remote;
    private volatile boolean connected;
    private final EpollDatagramChannelConfig config;

    /**
     * Creates a new, unbound datagram channel backed by a freshly created
     * datagram socket, registered for EPOLLIN (read) events.
     */
    public EpollDatagramChannel() {
        super(newSocketDgram(), Native.EPOLLIN);
        config = new EpollDatagramChannelConfig(this);
    }

    /**
     * @deprecated Use {@link #EpollDatagramChannel(Socket)}.
     */
    @Deprecated
    public EpollDatagramChannel(FileDescriptor fd) {
        this(new Socket(fd.intValue()));
    }

    /**
     * Creates a channel around an existing datagram {@link Socket}.
     */
    public EpollDatagramChannel(Socket fd) {
        super(null, fd, Native.EPOLLIN, true);
        // As we create an EpollDatagramChannel from a FileDescriptor we should try to obtain the remote and local
        // address from it. This is needed as the FileDescriptor may be bound already.
        local = fd.localAddress();
        config = new EpollDatagramChannelConfig(this);
    }

    @Override
    public InetSocketAddress remoteAddress() {
        return (InetSocketAddress) super.remoteAddress();
    }

    @Override
    public InetSocketAddress localAddress() {
        return (InetSocketAddress) super.localAddress();
    }

    @Override
    public ChannelMetadata metadata() {
        return METADATA;
    }

    @Override
    @SuppressWarnings("deprecation")
    public boolean isActive() {
        // A datagram channel may be considered active as soon as it is registered
        // (legacy DATAGRAM_CHANNEL_ACTIVE_ON_REGISTRATION option), otherwise only
        // once it has been bound/connected (the 'active' flag).
        return fd().isOpen() &&
                (config.getOption(ChannelOption.DATAGRAM_CHANNEL_ACTIVE_ON_REGISTRATION) && isRegistered()
                        || active);
    }

    @Override
    public boolean isConnected() {
        return connected;
    }

    @Override
    public ChannelFuture joinGroup(InetAddress multicastAddress) {
        return joinGroup(multicastAddress, newPromise());
    }

    @Override
    public ChannelFuture joinGroup(InetAddress multicastAddress, ChannelPromise promise) {
        try {
            // Resolve the NetworkInterface from the channel's local address and delegate.
            return joinGroup(
                    multicastAddress,
                    NetworkInterface.getByInetAddress(localAddress().getAddress()),
                    null, promise);
        } catch (SocketException e) {
            promise.setFailure(e);
        }
        return promise;
    }

    @Override
    public ChannelFuture joinGroup(
            InetSocketAddress multicastAddress, NetworkInterface networkInterface) {
        return joinGroup(multicastAddress, networkInterface, newPromise());
    }

    @Override
    public ChannelFuture joinGroup(
            InetSocketAddress multicastAddress, NetworkInterface networkInterface,
            ChannelPromise promise) {
        return joinGroup(multicastAddress.getAddress(), networkInterface, null, promise);
    }

    @Override
    public ChannelFuture joinGroup(
            InetAddress multicastAddress, NetworkInterface networkInterface, InetAddress source) {
        return joinGroup(multicastAddress, networkInterface, source, newPromise());
    }

    @Override
    public ChannelFuture joinGroup(
            final InetAddress multicastAddress, final NetworkInterface networkInterface,
            final InetAddress source, final ChannelPromise promise) {
        if (multicastAddress == null) {
            throw new NullPointerException("multicastAddress");
        }
        if (networkInterface == null) {
            throw new NullPointerException("networkInterface");
        }
        // Multicast is not implemented by this transport; fail the promise
        // rather than throwing, so asynchronous callers are notified.
        promise.setFailure(new UnsupportedOperationException("Multicast not supported"));
        return promise;
    }

    @Override
    public ChannelFuture leaveGroup(InetAddress multicastAddress) {
        return leaveGroup(multicastAddress, newPromise());
    }

    @Override
    public ChannelFuture leaveGroup(InetAddress multicastAddress, ChannelPromise promise) {
        try {
            return leaveGroup(
                    multicastAddress,
                    NetworkInterface.getByInetAddress(localAddress().getAddress()),
                    null, promise);
        } catch (SocketException e) {
            promise.setFailure(e);
        }
        return promise;
    }

    @Override
    public ChannelFuture leaveGroup(
            InetSocketAddress multicastAddress, NetworkInterface networkInterface) {
        return leaveGroup(multicastAddress, networkInterface, newPromise());
    }

    @Override
    public ChannelFuture leaveGroup(
            InetSocketAddress multicastAddress, NetworkInterface networkInterface,
            ChannelPromise promise) {
        return leaveGroup(multicastAddress.getAddress(), networkInterface, null, promise);
    }

    @Override
    public ChannelFuture leaveGroup(
            InetAddress multicastAddress, NetworkInterface networkInterface, InetAddress source) {
        return leaveGroup(multicastAddress, networkInterface, source, newPromise());
    }

    @Override
    public ChannelFuture leaveGroup(
            final InetAddress multicastAddress, final NetworkInterface networkInterface,
            final InetAddress source, final ChannelPromise promise) {
        if (multicastAddress == null) {
            throw new NullPointerException("multicastAddress");
        }
        if (networkInterface == null) {
            throw new NullPointerException("networkInterface");
        }
        // Multicast is not implemented by this transport.
        promise.setFailure(new UnsupportedOperationException("Multicast not supported"));
        return promise;
    }

    @Override
    public ChannelFuture block(
            InetAddress multicastAddress, NetworkInterface networkInterface,
            InetAddress sourceToBlock) {
        return block(multicastAddress, networkInterface, sourceToBlock, newPromise());
    }

    @Override
    public ChannelFuture block(
            final InetAddress multicastAddress, final NetworkInterface networkInterface,
            final InetAddress sourceToBlock, final ChannelPromise promise) {
        if (multicastAddress == null) {
            throw new NullPointerException("multicastAddress");
        }
        if (sourceToBlock == null) {
            throw new NullPointerException("sourceToBlock");
        }
        if (networkInterface == null) {
            throw new NullPointerException("networkInterface");
        }
        // Multicast source-blocking is not implemented by this transport.
        promise.setFailure(new UnsupportedOperationException("Multicast not supported"));
        return promise;
    }

    @Override
    public ChannelFuture block(InetAddress multicastAddress, InetAddress sourceToBlock) {
        return block(multicastAddress, sourceToBlock, newPromise());
    }

    @Override
    public ChannelFuture block(
            InetAddress multicastAddress, InetAddress sourceToBlock, ChannelPromise promise) {
        try {
            return block(
                    multicastAddress,
                    NetworkInterface.getByInetAddress(localAddress().getAddress()),
                    sourceToBlock, promise);
        } catch (Throwable e) {
            promise.setFailure(e);
        }
        return promise;
    }

    @Override
    protected AbstractEpollUnsafe newUnsafe() {
        return new EpollDatagramChannelUnsafe();
    }

    @Override
    protected InetSocketAddress localAddress0() {
        return local;
    }

    @Override
    protected InetSocketAddress remoteAddress0() {
        return remote;
    }

    @Override
    protected void doBind(SocketAddress localAddress) throws Exception {
        InetSocketAddress addr = (InetSocketAddress) localAddress;
        checkResolvable(addr);
        fd().bind(addr);
        // Re-read the local address from the fd: the kernel may have assigned
        // an ephemeral port / wildcard address.
        local = fd().localAddress();
        active = true;
    }

    /**
     * Drains the outbound buffer. Uses the sendmmsg(2) syscall (one syscall for
     * many datagrams) when available and more than one message is pending,
     * otherwise falls back to writing one message at a time with a bounded
     * number of spin attempts. When the socket cannot accept more data,
     * EPOLLOUT is armed and the method returns so the event loop retries later.
     */
    @Override
    protected void doWrite(ChannelOutboundBuffer in) throws Exception {
        for (;;) {
            Object msg = in.current();
            if (msg == null) {
                // Wrote all messages.
                clearFlag(Native.EPOLLOUT);
                break;
            }

            try {
                // Check if sendmmsg(...) is supported which is only the case for GLIBC 2.14+
                if (Native.IS_SUPPORTING_SENDMMSG && in.size() > 1) {
                    NativeDatagramPacketArray array = NativeDatagramPacketArray.getInstance(in);
                    int cnt = array.count();

                    if (cnt >= 1) {
                        // Try to use gathering writes via sendmmsg(...) syscall.
                        int offset = 0;
                        NativeDatagramPacketArray.NativeDatagramPacket[] packets = array.packets();

                        while (cnt > 0) {
                            int send = Native.sendmmsg(fd().intValue(), packets, offset, cnt);
                            if (send == 0) {
                                // Did not write all messages.
                                setFlag(Native.EPOLLOUT);
                                return;
                            }
                            for (int i = 0; i < send; i++) {
                                in.remove();
                            }
                            cnt -= send;
                            offset += send;
                        }
                        continue;
                    }
                }
                boolean done = false;
                // Spin a configurable number of times before giving up and
                // waiting for EPOLLOUT.
                for (int i = config().getWriteSpinCount() - 1; i >= 0; i--) {
                    if (doWriteMessage(msg)) {
                        done = true;
                        break;
                    }
                }

                if (done) {
                    in.remove();
                } else {
                    // Did not write all messages.
                    setFlag(Native.EPOLLOUT);
                    break;
                }
            } catch (IOException e) {
                // Continue on write error as a DatagramChannel can write to multiple remote peers
                //
                // See https://github.com/netty/netty/issues/2665
                in.remove(e);
            }
        }
    }

    /**
     * Writes a single message (a ByteBuf or an AddressedEnvelope carrying one).
     * Picks the fastest native path available: raw memory address, gathering
     * iovec write for composite buffers, or a plain NIO-buffer send.
     *
     * @return {@code true} if at least one byte was written, {@code false} if
     *         the socket accepted nothing (caller should retry / arm EPOLLOUT)
     */
    private boolean doWriteMessage(Object msg) throws Exception {
        final ByteBuf data;
        InetSocketAddress remoteAddress;
        if (msg instanceof AddressedEnvelope) {
            @SuppressWarnings("unchecked")
            AddressedEnvelope<ByteBuf, InetSocketAddress> envelope =
                    (AddressedEnvelope<ByteBuf, InetSocketAddress>) msg;
            data = envelope.content();
            remoteAddress = envelope.recipient();
        } else {
            data = (ByteBuf) msg;
            remoteAddress = null;
        }

        final int dataLen = data.readableBytes();
        if (dataLen == 0) {
            // Nothing to send; treat an empty datagram as successfully written.
            return true;
        }

        if (remoteAddress == null) {
            // No explicit recipient: requires a connected channel.
            remoteAddress = remote;
            if (remoteAddress == null) {
                throw new NotYetConnectedException();
            }
        }

        final int writtenBytes;
        if (data.hasMemoryAddress()) {
            // Fastest path: pass the raw memory address straight to the native send.
            long memoryAddress = data.memoryAddress();
            writtenBytes = fd().sendToAddress(memoryAddress, data.readerIndex(), data.writerIndex(),
                    remoteAddress.getAddress(), remoteAddress.getPort());
        } else if (data instanceof CompositeByteBuf) {
            // Gathering write over the composite's components via an iovec array.
            IovArray array = ((EpollEventLoop) eventLoop()).cleanArray();
            array.add(data);
            int cnt = array.count();
            assert cnt != 0;

            writtenBytes = fd().sendToAddresses(array.memoryAddress(0),
                    cnt, remoteAddress.getAddress(), remoteAddress.getPort());
        } else {
            ByteBuffer nioData = data.internalNioBuffer(data.readerIndex(), data.readableBytes());
            writtenBytes = fd().sendTo(nioData, nioData.position(), nioData.limit(),
                    remoteAddress.getAddress(), remoteAddress.getPort());
        }

        return writtenBytes > 0;
    }

    /**
     * Converts outbound messages into forms the native send paths can handle.
     * Buffers that already expose a memory address (or composite direct buffers
     * within the iovec limit) pass through untouched; anything else is copied
     * into a direct buffer. Unsupported message types are rejected.
     */
    @Override
    protected Object filterOutboundMessage(Object msg) {
        if (msg instanceof DatagramPacket) {
            DatagramPacket packet = (DatagramPacket) msg;
            ByteBuf content = packet.content();
            if (content.hasMemoryAddress()) {
                return msg;
            }

            if (content.isDirect() && content instanceof CompositeByteBuf) {
                // Special handling of CompositeByteBuf to reduce memory copies if some of the Components
                // in the CompositeByteBuf are backed by a memoryAddress.
                CompositeByteBuf comp = (CompositeByteBuf) content;
                if (comp.isDirect() && comp.nioBufferCount() <= Native.IOV_MAX) {
                    return msg;
                }
            }
            // We can only handle direct buffers so we need to copy if a non direct is
            // passed to write.
            return new DatagramPacket(newDirectBuffer(packet, content), packet.recipient());
        }

        if (msg instanceof ByteBuf) {
            ByteBuf buf = (ByteBuf) msg;
            if (!buf.hasMemoryAddress() && (PlatformDependent.hasUnsafe() || !buf.isDirect())) {
                if (buf instanceof CompositeByteBuf) {
                    // Special handling of CompositeByteBuf to reduce memory copies if some of the Components
                    // in the CompositeByteBuf are backed by a memoryAddress.
                    CompositeByteBuf comp = (CompositeByteBuf) buf;
                    if (!comp.isDirect() || comp.nioBufferCount() > Native.IOV_MAX) {
                        // more then 1024 buffers for gathering writes so just do a memory copy.
                        buf = newDirectBuffer(buf);
                        assert buf.hasMemoryAddress();
                    }
                } else {
                    // We can only handle buffers with memory address so we need to copy if a non direct is
                    // passed to write.
                    buf = newDirectBuffer(buf);
                    assert buf.hasMemoryAddress();
                }
            }
            return buf;
        }

        if (msg instanceof AddressedEnvelope) {
            @SuppressWarnings("unchecked")
            AddressedEnvelope<Object, SocketAddress> e = (AddressedEnvelope<Object, SocketAddress>) msg;
            if (e.content() instanceof ByteBuf &&
                    (e.recipient() == null || e.recipient() instanceof InetSocketAddress)) {
                ByteBuf content = (ByteBuf) e.content();
                if (content.hasMemoryAddress()) {
                    return e;
                }
                if (content instanceof CompositeByteBuf) {
                    // Special handling of CompositeByteBuf to reduce memory copies if some of the Components
                    // in the CompositeByteBuf are backed by a memoryAddress.
                    CompositeByteBuf comp = (CompositeByteBuf) content;
                    if (comp.isDirect() && comp.nioBufferCount() <= Native.IOV_MAX) {
                        return e;
                    }
                }
                // We can only handle direct buffers so we need to copy if a non direct is
                // passed to write.
                return new DefaultAddressedEnvelope<ByteBuf, InetSocketAddress>(
                        newDirectBuffer(e, content), (InetSocketAddress) e.recipient());
            }
        }

        throw new UnsupportedOperationException(
                "unsupported message type: " + StringUtil.simpleClassName(msg) + EXPECTED_TYPES);
    }

    @Override
    public EpollDatagramChannelConfig config() {
        return config;
    }

    @Override
    protected void doDisconnect() throws Exception {
        connected = false;
    }

    /**
     * Unsafe implementation: performs connect and drives the epoll read loop
     * for this datagram channel. Runs on the channel's event loop.
     */
    final class EpollDatagramChannelUnsafe extends AbstractEpollUnsafe {
        // Reused between epollInReady() invocations to collect received packets
        // before firing them through the pipeline.
        private final List<Object> readBuf = new ArrayList<Object>();

        @Override
        public void connect(SocketAddress remote, SocketAddress local, ChannelPromise channelPromise) {
            boolean success = false;
            try {
                try {
                    boolean wasActive = isActive();
                    InetSocketAddress remoteAddress = (InetSocketAddress) remote;
                    if (local != null) {
                        InetSocketAddress localAddress = (InetSocketAddress) local;
                        doBind(localAddress);
                    }

                    checkResolvable(remoteAddress);
                    EpollDatagramChannel.this.remote = remoteAddress;
                    EpollDatagramChannel.this.local = fd().localAddress();
                    success = true;

                    // First notify the promise before notifying the handler.
                    channelPromise.trySuccess();

                    // Regardless if the connection attempt was cancelled, channelActive() event should be triggered,
                    // because what happened is what happened.
                    if (!wasActive && isActive()) {
                        pipeline().fireChannelActive();
                    }
                } finally {
                    if (!success) {
                        doClose();
                    } else {
                        connected = true;
                    }
                }
            } catch (Throwable cause) {
                channelPromise.tryFailure(cause);
            }
        }

        @Override
        void epollInReady() {
            assert eventLoop().inEventLoop();
            if (fd().isInputShutdown()) {
                return;
            }
            DatagramChannelConfig config = config();
            final EpollRecvByteAllocatorHandle allocHandle = recvBufAllocHandle();
            allocHandle.edgeTriggered(isFlagSet(Native.EPOLLET));

            if (!readPending && !allocHandle.isEdgeTriggered() && !config.isAutoRead()) {
                // ChannelConfig.setAutoRead(false) was called in the meantime
                clearEpollIn0();
                return;
            }

            final ChannelPipeline pipeline = pipeline();
            final ByteBufAllocator allocator = config.getAllocator();
            allocHandle.reset(config);

            Throwable exception = null;
            try {
                ByteBuf data = null;
                try {
                    // Keep reading datagrams until the allocator handle says stop
                    // or the socket has nothing more to give.
                    do {
                        data = allocHandle.allocate(allocator);
                        allocHandle.attemptedBytesRead(data.writableBytes());
                        final DatagramSocketAddress remoteAddress;
                        if (data.hasMemoryAddress()) {
                            // has a memory address so use optimized call
                            remoteAddress = fd().recvFromAddress(data.memoryAddress(), data.writerIndex(),
                                    data.capacity());
                        } else {
                            ByteBuffer nioData = data.internalNioBuffer(data.writerIndex(), data.writableBytes());
                            remoteAddress = fd().recvFrom(nioData, nioData.position(), nioData.limit());
                        }
                        epollInReadAttempted();

                        if (remoteAddress == null) {
                            // Nothing was received: record EOF-style result and release the buffer.
                            allocHandle.lastBytesRead(-1);
                            data.release();
                            data = null;
                            break;
                        }

                        allocHandle.incMessagesRead(1);
                        allocHandle.lastBytesRead(remoteAddress.receivedAmount());
                        data.writerIndex(data.writerIndex() + allocHandle.lastBytesRead());
                        readBuf.add(new DatagramPacket(data, (InetSocketAddress) localAddress(), remoteAddress));
                        // Ownership of 'data' moved into the DatagramPacket; clear the
                        // local ref so the catch block does not double-release it.
                        data = null;
                    } while (allocHandle.continueReading());
                } catch (Throwable t) {
                    if (data != null) {
                        data.release();
                    }
                    exception = t;
                }

                // Fire all collected packets, then readComplete, then any error.
                int size = readBuf.size();
                for (int i = 0; i < size; i ++) {
                    pipeline.fireChannelRead(readBuf.get(i));
                }
                readBuf.clear();
                allocHandle.readComplete();
                maybeMoreDataToRead = allocHandle.maybeMoreDataToRead();
                pipeline.fireChannelReadComplete();

                if (exception != null) {
                    pipeline.fireExceptionCaught(exception);
                    checkResetEpollIn(allocHandle.isEdgeTriggered());
                }
            } finally {
                epollInFinally(config);
            }
        }
    }
}
/** * Copyright 2005 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.workflow.instance.node; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Map.Entry; import java.util.regex.Matcher; import org.drools.core.common.InternalAgenda; import org.drools.core.common.InternalKnowledgeRuntime; import org.drools.core.process.core.datatype.DataType; import org.drools.core.util.MVELSafeHelper; import org.jbpm.process.core.context.variable.Variable; import org.jbpm.process.core.context.variable.VariableScope; import org.jbpm.process.core.impl.DataTransformerRegistry; import org.jbpm.process.instance.context.variable.VariableScopeInstance; import org.jbpm.workflow.core.node.DataAssociation; import org.jbpm.workflow.core.node.RuleSetNode; import org.jbpm.workflow.core.node.Transformation; import org.jbpm.workflow.instance.impl.NodeInstanceResolverFactory; import org.kie.api.runtime.KieSession; import org.kie.api.runtime.process.DataTransformer; import org.kie.api.runtime.process.EventListener; import org.kie.api.runtime.process.NodeInstance; import org.kie.api.runtime.rule.FactHandle; import org.kie.internal.runtime.KnowledgeRuntime; import org.kie.internal.runtime.StatefulKnowledgeSession; import org.mvel2.integration.impl.MapVariableResolverFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Runtime counterpart of a ruleset node. 
* */ public class RuleSetNodeInstance extends StateBasedNodeInstance implements EventListener { private static final long serialVersionUID = 510l; private static final Logger logger = LoggerFactory.getLogger(RuleSetNodeInstance.class); private static final String ACT_AS_WAIT_STATE_PROPERTY = "org.jbpm.rule.task.waitstate"; private Map<String, FactHandle> factHandles = new HashMap<String, FactHandle>(); private String ruleFlowGroup; protected RuleSetNode getRuleSetNode() { return (RuleSetNode) getNode(); } public void internalTrigger(final NodeInstance from, String type) { super.internalTrigger(from, type); // if node instance was cancelled, abort if (getNodeInstanceContainer().getNodeInstance(getId()) == null) { return; } if ( !org.jbpm.workflow.core.Node.CONNECTION_DEFAULT_TYPE.equals( type ) ) { throw new IllegalArgumentException( "A RuleSetNode only accepts default incoming connections!" ); } // first set rule flow group setRuleFlowGroup(resolveRuleFlowGroup(getRuleSetNode().getRuleFlowGroup())); //proceed KnowledgeRuntime kruntime = getProcessInstance().getKnowledgeRuntime(); Map<String, Object> inputs = evaluateParameters(getRuleSetNode()); for (Entry<String, Object> entry : inputs.entrySet()) { String inputKey = getRuleFlowGroup() + "_" +getProcessInstance().getId() +"_"+entry.getKey(); factHandles.put(inputKey, kruntime.insert(entry.getValue())); } if (actAsWaitState()) { addRuleSetListener(); ((InternalAgenda) getProcessInstance().getKnowledgeRuntime().getAgenda()) .activateRuleFlowGroup( getRuleFlowGroup(), getProcessInstance().getId(), getUniqueId() ); } else { ((InternalAgenda) getProcessInstance().getKnowledgeRuntime().getAgenda()) .activateRuleFlowGroup( getRuleFlowGroup(), getProcessInstance().getId(), getUniqueId() ); ((KieSession)getProcessInstance().getKnowledgeRuntime()).fireAllRules(); removeEventListeners(); retractFacts(); triggerCompleted(); } } public void addEventListeners() { super.addEventListeners(); addRuleSetListener(); } private String 
getRuleSetEventType() { InternalKnowledgeRuntime kruntime = getProcessInstance().getKnowledgeRuntime(); if (kruntime instanceof StatefulKnowledgeSession) { return "RuleFlowGroup_" + getRuleFlowGroup() + "_" + ((StatefulKnowledgeSession) kruntime).getIdentifier(); } else { return "RuleFlowGroup_" + getRuleFlowGroup(); } } private void addRuleSetListener() { getProcessInstance().addEventListener(getRuleSetEventType(), this, true); } public void removeEventListeners() { super.removeEventListeners(); getProcessInstance().removeEventListener(getRuleSetEventType(), this, true); } public void cancel() { super.cancel(); ((InternalAgenda) getProcessInstance().getKnowledgeRuntime().getAgenda()).deactivateRuleFlowGroup(getRuleFlowGroup()); } public void signalEvent(String type, Object event) { if (getRuleSetEventType().equals(type)) { removeEventListeners(); retractFacts(); triggerCompleted(); } } public void retractFacts() { Map<String, Object> objects = new HashMap<String, Object>(); KnowledgeRuntime kruntime = getProcessInstance().getKnowledgeRuntime(); for (Entry<String, FactHandle> entry : factHandles.entrySet()) { Object object = ((StatefulKnowledgeSession)kruntime).getObject(entry.getValue()); String key = entry.getKey(); key = key.replaceAll(getRuleFlowGroup()+"_", ""); key = key.replaceAll(getProcessInstance().getId()+"_", ""); objects.put(key , object); kruntime.delete(entry.getValue()); } RuleSetNode ruleSetNode = getRuleSetNode(); if (ruleSetNode != null) { for (Iterator<DataAssociation> iterator = ruleSetNode.getOutAssociations().iterator(); iterator.hasNext(); ) { DataAssociation association = iterator.next(); if (association.getTransformation() != null) { Transformation transformation = association.getTransformation(); DataTransformer transformer = DataTransformerRegistry.get().find(transformation.getLanguage()); if (transformer != null) { Object parameterValue = transformer.transform(transformation.getCompiledExpression(), objects); VariableScopeInstance 
variableScopeInstance = (VariableScopeInstance) resolveContextInstance(VariableScope.VARIABLE_SCOPE, association.getTarget()); if (variableScopeInstance != null && parameterValue != null) { variableScopeInstance.setVariable(association.getTarget(), parameterValue); } else { logger.warn("Could not find variable scope for variable {}", association.getTarget()); logger.warn("Continuing without setting variable."); } if (parameterValue != null) { variableScopeInstance.setVariable(association.getTarget(), parameterValue); } } } else if (association.getAssignments() == null || association.getAssignments().isEmpty()) { VariableScopeInstance variableScopeInstance = (VariableScopeInstance) resolveContextInstance(VariableScope.VARIABLE_SCOPE, association.getTarget()); if (variableScopeInstance != null) { Object value = objects.get(association.getSources().get(0)); if (value == null) { try { value = MVELSafeHelper.getEvaluator().eval(association.getSources().get(0), new MapVariableResolverFactory(objects)); } catch (Throwable t) { // do nothing } } Variable varDef = variableScopeInstance.getVariableScope().findVariable(association.getTarget()); DataType dataType = varDef.getType(); // exclude java.lang.Object as it is considered unknown type if (!dataType.getStringType().endsWith("java.lang.Object") && value instanceof String) { value = dataType.readValue((String) value); } variableScopeInstance.setVariable(association.getTarget(), value); } else { logger.warn("Could not find variable scope for variable {}", association.getTarget()); } } } } factHandles.clear(); } protected Map<String, Object> evaluateParameters(RuleSetNode ruleSetNode) { Map<String, Object> replacements = new HashMap<String, Object>(); for (Iterator<DataAssociation> iterator = ruleSetNode.getInAssociations().iterator(); iterator.hasNext(); ) { DataAssociation association = iterator.next(); if (association.getTransformation() != null) { Transformation transformation = association.getTransformation(); 
DataTransformer transformer = DataTransformerRegistry.get().find(transformation.getLanguage()); if (transformer != null) { Object parameterValue = transformer.transform(transformation.getCompiledExpression(), getSourceParameters(association)); if (parameterValue != null) { replacements.put(association.getTarget(), parameterValue); } } } else if (association.getAssignments() == null || association.getAssignments().isEmpty()) { Object parameterValue = null; VariableScopeInstance variableScopeInstance = (VariableScopeInstance) resolveContextInstance(VariableScope.VARIABLE_SCOPE, association.getSources().get(0)); if (variableScopeInstance != null) { parameterValue = variableScopeInstance.getVariable(association.getSources().get(0)); } else { try { parameterValue = MVELSafeHelper.getEvaluator().eval(association.getSources().get(0), new NodeInstanceResolverFactory(this)); } catch (Throwable t) { logger.error("Could not find variable scope for variable {}", association.getSources().get(0)); logger.error("when trying to execute RuleSetNode {}", ruleSetNode.getName()); logger.error("Continuing without setting parameter."); } } if (parameterValue != null) { replacements.put(association.getTarget(), parameterValue); } } } for (Map.Entry<String, Object> entry: ruleSetNode.getParameters().entrySet()) { if (entry.getValue() instanceof String) { Object value = resolveVariable(entry.getValue()); if (value != null) { replacements.put(entry.getKey(), value); } } } return replacements; } private Object resolveVariable(Object s) { if (s instanceof String) { Matcher matcher = PARAMETER_MATCHER.matcher((String) s); while (matcher.find()) { String paramName = matcher.group(1); VariableScopeInstance variableScopeInstance = (VariableScopeInstance) resolveContextInstance(VariableScope.VARIABLE_SCOPE, paramName); if (variableScopeInstance != null) { Object variableValue = variableScopeInstance.getVariable(paramName); if (variableValue != null) { return variableValue; } } else { try { Object 
variableValue = MVELSafeHelper.getEvaluator().eval(paramName, new NodeInstanceResolverFactory(this)); if (variableValue != null) { return variableValue; } } catch (Throwable t) { logger.error("Could not find variable scope for variable {}", paramName); } } } } return s; } protected Map<String, Object> getSourceParameters(DataAssociation association) { Map<String, Object> parameters = new HashMap<String, Object>(); for (String sourceParam : association.getSources()) { Object parameterValue = null; VariableScopeInstance variableScopeInstance = (VariableScopeInstance) resolveContextInstance(VariableScope.VARIABLE_SCOPE, sourceParam); if (variableScopeInstance != null) { parameterValue = variableScopeInstance.getVariable(sourceParam); } else { try { parameterValue = MVELSafeHelper.getEvaluator().eval(sourceParam, new NodeInstanceResolverFactory(this)); } catch (Throwable t) { logger.warn("Could not find variable scope for variable {}", sourceParam); } } if (parameterValue != null) { parameters.put(association.getTarget(), parameterValue); } } return parameters; } private String resolveRuleFlowGroup(String origin) { return (String) resolveVariable(origin); } public Map<String, FactHandle> getFactHandles() { return factHandles; } public void setFactHandles(Map<String, FactHandle> factHandles) { this.factHandles = factHandles; } public String getRuleFlowGroup() { if (ruleFlowGroup == null || ruleFlowGroup.trim().length() == 0) { ruleFlowGroup = getRuleSetNode().getRuleFlowGroup(); } return ruleFlowGroup; } public void setRuleFlowGroup(String ruleFlowGroup) { this.ruleFlowGroup = ruleFlowGroup; } protected boolean actAsWaitState() { Object asWaitState = getProcessInstance().getKnowledgeRuntime().getEnvironment().get(ACT_AS_WAIT_STATE_PROPERTY); if (asWaitState != null) { return Boolean.parseBoolean(asWaitState.toString()); } return false; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.geo; import org.apache.lucene.util.Bits; import org.apache.lucene.util.SloppyMath; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.GeoPointValues; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; import java.io.IOException; import java.util.Locale; /** * Geo distance calculation. */ public enum GeoDistance implements Writeable<GeoDistance> { /** * Calculates distance as points on a plane. Faster, but less accurate than {@link #ARC}. 
*/ PLANE { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double px = targetLongitude - sourceLongitude; double py = targetLatitude - sourceLatitude; return Math.sqrt(px * px + py * py) * unit.getDistancePerDegree(); } @Override public double normalize(double distance, DistanceUnit unit) { return distance; } @Override public FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { return new PlaneFixedSourceDistance(sourceLatitude, sourceLongitude, unit); } }, /** * Calculates distance factor. */ FACTOR { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double longitudeDifference = targetLongitude - sourceLongitude; double a = Math.toRadians(90D - sourceLatitude); double c = Math.toRadians(90D - targetLatitude); return (Math.cos(a) * Math.cos(c)) + (Math.sin(a) * Math.sin(c) * Math.cos(Math.toRadians(longitudeDifference))); } @Override public double normalize(double distance, DistanceUnit unit) { return Math.cos(distance / unit.getEarthRadius()); } @Override public FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { return new FactorFixedSourceDistance(sourceLatitude, sourceLongitude, unit); } }, /** * Calculates distance as points on a globe. 
*/ ARC { @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { double x1 = sourceLatitude * Math.PI / 180D; double x2 = targetLatitude * Math.PI / 180D; double h1 = 1D - Math.cos(x1 - x2); double h2 = 1D - Math.cos((sourceLongitude - targetLongitude) * Math.PI / 180D); double h = (h1 + Math.cos(x1) * Math.cos(x2) * h2) / 2; double averageLatitude = (x1 + x2) / 2; double diameter = GeoUtils.earthDiameter(averageLatitude); return unit.fromMeters(diameter * Math.asin(Math.min(1, Math.sqrt(h)))); } @Override public double normalize(double distance, DistanceUnit unit) { return distance; } @Override public FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { return new ArcFixedSourceDistance(sourceLatitude, sourceLongitude, unit); } }, /** * Calculates distance as points on a globe in a sloppy way. Close to the pole areas the accuracy * of this function decreases. */ SLOPPY_ARC { @Override public double normalize(double distance, DistanceUnit unit) { return distance; } @Override public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) { return unit.fromMeters(SloppyMath.haversin(sourceLatitude, sourceLongitude, targetLatitude, targetLongitude) * 1000.0); } @Override public FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { return new SloppyArcFixedSourceDistance(sourceLatitude, sourceLongitude, unit); } }; /** Returns a GeoDistance object as read from the StreamInput. 
*/ @Override public GeoDistance readFrom(StreamInput in) throws IOException { int ord = in.readVInt(); if (ord < 0 || ord >= values().length) { throw new IOException("Unknown GeoDistance ordinal [" + ord + "]"); } return GeoDistance.values()[ord]; } public static GeoDistance readGeoDistanceFrom(StreamInput in) throws IOException { return DEFAULT.readFrom(in); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(this.ordinal()); } /** * Default {@link GeoDistance} function. This method should be used, If no specific function has been selected. * This is an alias for <code>SLOPPY_ARC</code> */ public static final GeoDistance DEFAULT = SLOPPY_ARC; public abstract double normalize(double distance, DistanceUnit unit); public abstract double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit); public abstract FixedSourceDistance fixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit); private static final double MIN_LAT = Math.toRadians(-90d); // -PI/2 private static final double MAX_LAT = Math.toRadians(90d); // PI/2 private static final double MIN_LON = Math.toRadians(-180d); // -PI private static final double MAX_LON = Math.toRadians(180d); // PI public static DistanceBoundingCheck distanceBoundingCheck(double sourceLatitude, double sourceLongitude, double distance, DistanceUnit unit) { // angular distance in radians on a great circle // assume worst-case: use the minor axis double radDist = unit.toMeters(distance) / GeoUtils.EARTH_SEMI_MINOR_AXIS; double radLat = Math.toRadians(sourceLatitude); double radLon = Math.toRadians(sourceLongitude); double minLat = radLat - radDist; double maxLat = radLat + radDist; double minLon, maxLon; if (minLat > MIN_LAT && maxLat < MAX_LAT) { double deltaLon = Math.asin(Math.sin(radDist) / Math.cos(radLat)); minLon = radLon - deltaLon; if (minLon < MIN_LON) minLon += 2d * Math.PI; maxLon = radLon + 
deltaLon; if (maxLon > MAX_LON) maxLon -= 2d * Math.PI; } else { // a pole is within the distance minLat = Math.max(minLat, MIN_LAT); maxLat = Math.min(maxLat, MAX_LAT); minLon = MIN_LON; maxLon = MAX_LON; } GeoPoint topLeft = new GeoPoint(Math.toDegrees(maxLat), Math.toDegrees(minLon)); GeoPoint bottomRight = new GeoPoint(Math.toDegrees(minLat), Math.toDegrees(maxLon)); if (minLon > maxLon) { return new Meridian180DistanceBoundingCheck(topLeft, bottomRight); } return new SimpleDistanceBoundingCheck(topLeft, bottomRight); } /** * Get a {@link GeoDistance} according to a given name. Valid values are * * <ul> * <li><b>plane</b> for <code>GeoDistance.PLANE</code></li> * <li><b>sloppy_arc</b> for <code>GeoDistance.SLOPPY_ARC</code></li> * <li><b>factor</b> for <code>GeoDistance.FACTOR</code></li> * <li><b>arc</b> for <code>GeoDistance.ARC</code></li> * </ul> * * @param name name of the {@link GeoDistance} * @return a {@link GeoDistance} */ public static GeoDistance fromString(String name) { name = name.toLowerCase(Locale.ROOT); if ("plane".equals(name)) { return PLANE; } else if ("arc".equals(name)) { return ARC; } else if ("sloppy_arc".equals(name)) { return SLOPPY_ARC; } else if ("factor".equals(name)) { return FACTOR; } throw new IllegalArgumentException("No geo distance for [" + name + "]"); } public static interface FixedSourceDistance { double calculate(double targetLatitude, double targetLongitude); } public static interface DistanceBoundingCheck { boolean isWithin(double targetLatitude, double targetLongitude); GeoPoint topLeft(); GeoPoint bottomRight(); } public static final AlwaysDistanceBoundingCheck ALWAYS_INSTANCE = new AlwaysDistanceBoundingCheck(); private static class AlwaysDistanceBoundingCheck implements DistanceBoundingCheck { @Override public boolean isWithin(double targetLatitude, double targetLongitude) { return true; } @Override public GeoPoint topLeft() { return null; } @Override public GeoPoint bottomRight() { return null; } } public static 
class Meridian180DistanceBoundingCheck implements DistanceBoundingCheck { private final GeoPoint topLeft; private final GeoPoint bottomRight; public Meridian180DistanceBoundingCheck(GeoPoint topLeft, GeoPoint bottomRight) { this.topLeft = topLeft; this.bottomRight = bottomRight; } @Override public boolean isWithin(double targetLatitude, double targetLongitude) { return (targetLatitude >= bottomRight.lat() && targetLatitude <= topLeft.lat()) && (targetLongitude >= topLeft.lon() || targetLongitude <= bottomRight.lon()); } @Override public GeoPoint topLeft() { return topLeft; } @Override public GeoPoint bottomRight() { return bottomRight; } } public static class SimpleDistanceBoundingCheck implements DistanceBoundingCheck { private final GeoPoint topLeft; private final GeoPoint bottomRight; public SimpleDistanceBoundingCheck(GeoPoint topLeft, GeoPoint bottomRight) { this.topLeft = topLeft; this.bottomRight = bottomRight; } @Override public boolean isWithin(double targetLatitude, double targetLongitude) { return (targetLatitude >= bottomRight.lat() && targetLatitude <= topLeft.lat()) && (targetLongitude >= topLeft.lon() && targetLongitude <= bottomRight.lon()); } @Override public GeoPoint topLeft() { return topLeft; } @Override public GeoPoint bottomRight() { return bottomRight; } } public static class PlaneFixedSourceDistance implements FixedSourceDistance { private final double sourceLatitude; private final double sourceLongitude; private final double distancePerDegree; public PlaneFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { this.sourceLatitude = sourceLatitude; this.sourceLongitude = sourceLongitude; this.distancePerDegree = unit.getDistancePerDegree(); } @Override public double calculate(double targetLatitude, double targetLongitude) { double px = targetLongitude - sourceLongitude; double py = targetLatitude - sourceLatitude; return Math.sqrt(px * px + py * py) * distancePerDegree; } } public static class 
FactorFixedSourceDistance implements FixedSourceDistance { private final double sourceLongitude; private final double a; private final double sinA; private final double cosA; public FactorFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { this.sourceLongitude = sourceLongitude; this.a = Math.toRadians(90D - sourceLatitude); this.sinA = Math.sin(a); this.cosA = Math.cos(a); } @Override public double calculate(double targetLatitude, double targetLongitude) { double longitudeDifference = targetLongitude - sourceLongitude; double c = Math.toRadians(90D - targetLatitude); return (cosA * Math.cos(c)) + (sinA * Math.sin(c) * Math.cos(Math.toRadians(longitudeDifference))); } } /** * Basic implementation of {@link FixedSourceDistance}. This class keeps the basic parameters for a distance * functions based on a fixed source. Namely latitude, longitude and unit. */ public static abstract class FixedSourceDistanceBase implements FixedSourceDistance { protected final double sourceLatitude; protected final double sourceLongitude; protected final DistanceUnit unit; public FixedSourceDistanceBase(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { this.sourceLatitude = sourceLatitude; this.sourceLongitude = sourceLongitude; this.unit = unit; } } public static class ArcFixedSourceDistance extends FixedSourceDistanceBase { public ArcFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { super(sourceLatitude, sourceLongitude, unit); } @Override public double calculate(double targetLatitude, double targetLongitude) { return ARC.calculate(sourceLatitude, sourceLongitude, targetLatitude, targetLongitude, unit); } } public static class SloppyArcFixedSourceDistance extends FixedSourceDistanceBase { public SloppyArcFixedSourceDistance(double sourceLatitude, double sourceLongitude, DistanceUnit unit) { super(sourceLatitude, sourceLongitude, unit); } @Override public double calculate(double targetLatitude, 
double targetLongitude) { return SLOPPY_ARC.calculate(sourceLatitude, sourceLongitude, targetLatitude, targetLongitude, unit); } } /** * Return a {@link SortedNumericDoubleValues} instance that returns the distances to a list of geo-points for each document. */ public static SortedNumericDoubleValues distanceValues(final MultiGeoPointValues geoPointValues, final FixedSourceDistance... distances) { final GeoPointValues singleValues = FieldData.unwrapSingleton(geoPointValues); if (singleValues != null && distances.length == 1) { final Bits docsWithField = FieldData.unwrapSingletonBits(geoPointValues); return FieldData.singleton(new NumericDoubleValues() { @Override public double get(int docID) { if (docsWithField != null && !docsWithField.get(docID)) { return 0d; } final GeoPoint point = singleValues.get(docID); return distances[0].calculate(point.lat(), point.lon()); } }, docsWithField); } else { return new SortingNumericDoubleValues() { @Override public void setDocument(int doc) { geoPointValues.setDocument(doc); resize(geoPointValues.count() * distances.length); int valueCounter = 0; for (FixedSourceDistance distance : distances) { for (int i = 0; i < geoPointValues.count(); ++i) { final GeoPoint point = geoPointValues.valueAt(i); values[valueCounter] = distance.calculate(point.lat(), point.lon()); valueCounter++; } } sort(); } }; } } }
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.web.embedded.undertow;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import io.undertow.Handlers;
import io.undertow.Undertow;
import io.undertow.UndertowOptions;
import io.undertow.server.HttpHandler;
import io.undertow.server.handlers.accesslog.AccessLogHandler;
import io.undertow.server.handlers.accesslog.DefaultAccessLogReceiver;
import io.undertow.servlet.api.DeploymentInfo;
import org.xnio.OptionMap;
import org.xnio.Options;
import org.xnio.Xnio;
import org.xnio.XnioWorker;

import org.springframework.boot.web.reactive.server.AbstractReactiveWebServerFactory;
import org.springframework.boot.web.reactive.server.ReactiveWebServerFactory;
import org.springframework.boot.web.server.WebServer;
import org.springframework.http.server.reactive.UndertowHttpHandlerAdapter;
import org.springframework.util.Assert;

/**
 * {@link ReactiveWebServerFactory} that can be used to create {@link UndertowWebServer}s.
 *
 * @author Brian Clozel
 * @since 2.0.0
 */
public class UndertowReactiveWebServerFactory extends AbstractReactiveWebServerFactory
		implements ConfigurableUndertowWebServerFactory {

	// Customizers applied, in order, to the Undertow.Builder in createBuilder().
	private List<UndertowBuilderCustomizer> builderCustomizers = new ArrayList<>();

	// NOTE(review): these are collected via the setters/adders below but never
	// read anywhere in this factory (no DeploymentInfo is built for a reactive
	// server) — confirm keeping them is intentional for interface compatibility.
	private List<UndertowDeploymentInfoCustomizer> deploymentInfoCustomizers = new ArrayList<>();

	// Optional low-level Undertow tuning knobs; null means "use Undertow default".
	private Integer bufferSize;

	private Integer ioThreads;

	private Integer workerThreads;

	private Boolean directBuffers;

	// Access-log configuration; only consulted when accessLogEnabled is true.
	private File accessLogDirectory;

	private String accessLogPattern;

	private String accessLogPrefix;

	private String accessLogSuffix;

	private boolean accessLogEnabled = false;

	private boolean accessLogRotate = true;

	// When true, the handler chain is wrapped to honor X-Forwarded-* headers.
	private boolean useForwardHeaders;

	/**
	 * Create a new {@link UndertowReactiveWebServerFactory} instance.
	 */
	public UndertowReactiveWebServerFactory() {
	}

	/**
	 * Create a new {@link UndertowReactiveWebServerFactory} that listens for requests
	 * using the specified port.
	 * @param port the port to listen on
	 */
	public UndertowReactiveWebServerFactory(int port) {
		super(port);
	}

	/**
	 * Build a {@link WebServer} wrapping the given reactive handler. The
	 * {@link Closeable} produced while wiring the handler (access-log resources,
	 * possibly {@code null}) is handed to the server so it is released on stop.
	 */
	@Override
	public WebServer getWebServer(
			org.springframework.http.server.reactive.HttpHandler httpHandler) {
		Undertow.Builder builder = createBuilder(getPort());
		Closeable closeable = configureHandler(builder, httpHandler);
		return new UndertowWebServer(builder, getPort() >= 0, closeable);
	}

	// Creates the Undertow builder: applies tuning options, chooses an HTTPS or
	// HTTP listener depending on the SSL configuration, then runs user customizers.
	private Undertow.Builder createBuilder(int port) {
		Undertow.Builder builder = Undertow.builder();
		if (this.bufferSize != null) {
			builder.setBufferSize(this.bufferSize);
		}
		if (this.ioThreads != null) {
			builder.setIoThreads(this.ioThreads);
		}
		if (this.workerThreads != null) {
			builder.setWorkerThreads(this.workerThreads);
		}
		if (this.directBuffers != null) {
			builder.setDirectBuffers(this.directBuffers);
		}
		if (getSsl() != null && getSsl().isEnabled()) {
			customizeSsl(builder);
		}
		else {
			builder.addHttpListener(port, getListenAddress());
		}
		for (UndertowBuilderCustomizer customizer : this.builderCustomizers) {
			customizer.customize(builder);
		}
		return builder;
	}

	// Wraps the reactive adapter with (optionally) proxy-peer-address handling,
	// compression, and the access log. Returns the Closeable owning any
	// access-log resources, or null when no cleanup is needed.
	private Closeable configureHandler(Undertow.Builder builder,
			org.springframework.http.server.reactive.HttpHandler httpHandler) {
		HttpHandler handler = new UndertowHttpHandlerAdapter(httpHandler);
		if (this.useForwardHeaders) {
			handler = Handlers.proxyPeerAddress(handler);
		}
		handler = UndertowCompressionConfigurer.configureCompression(getCompression(),
				handler);
		Closeable closeable = null;
		if (isAccessLogEnabled()) {
			closeable = configureAccessLogHandler(builder, handler);
		}
		else {
			builder.setHandler(handler);
		}
		return closeable;
	}

	// Installs an AccessLogHandler in front of the given handler. The returned
	// Closeable closes the log receiver and shuts down the dedicated XNIO worker.
	private Closeable configureAccessLogHandler(Undertow.Builder builder,
			HttpHandler handler) {
		try {
			createAccessLogDirectoryIfNecessary();
			XnioWorker worker = createWorker();
			// Defaults mirror Undertow's conventions when not configured.
			String prefix = (this.accessLogPrefix != null) ? this.accessLogPrefix
					: "access_log.";
			DefaultAccessLogReceiver accessLogReceiver = new DefaultAccessLogReceiver(
					worker, this.accessLogDirectory, prefix, this.accessLogSuffix,
					this.accessLogRotate);
			String formatString = ((this.accessLogPattern != null) ? this.accessLogPattern
					: "common");
			builder.setHandler(new AccessLogHandler(handler, accessLogReceiver,
					formatString, Undertow.class.getClassLoader()));
			return () -> {
				try {
					accessLogReceiver.close();
					worker.shutdown();
				}
				catch (IOException ex) {
					throw new IllegalStateException(ex);
				}
			};
		}
		catch (IOException ex) {
			throw new IllegalStateException("Failed to create AccessLogHandler", ex);
		}
	}

	// Fails fast if no directory was configured; otherwise creates it on demand.
	private void createAccessLogDirectoryIfNecessary() {
		Assert.state(this.accessLogDirectory != null, "Access log directory is not set");
		if (!this.accessLogDirectory.isDirectory() && !this.accessLogDirectory.mkdirs()) {
			throw new IllegalStateException("Failed to create access log directory '"
					+ this.accessLogDirectory + "'");
		}
	}

	// Daemon worker so the access-log thread never keeps the JVM alive.
	private XnioWorker createWorker() throws IOException {
		Xnio xnio = Xnio.getInstance(Undertow.class.getClassLoader());
		return xnio.createWorker(
				OptionMap.builder().set(Options.THREAD_DAEMON, true).getMap());
	}

	// Adds the HTTPS listener and, when configured, enables HTTP/2.
	private void customizeSsl(Undertow.Builder builder) {
		new SslBuilderCustomizer(getPort(), getAddress(), getSsl(), getSslStoreProvider())
				.customize(builder);
		if (getHttp2() != null) {
			builder.setServerOption(UndertowOptions.ENABLE_HTTP2, getHttp2().isEnabled());
		}
	}

	// Bind address for the plain-HTTP listener; all interfaces when unset.
	private String getListenAddress() {
		if (getAddress() == null) {
			return "0.0.0.0";
		}
		return getAddress().getHostAddress();
	}

	/**
	 * Set {@link UndertowDeploymentInfoCustomizer}s that should be applied to the
	 * Undertow {@link DeploymentInfo}. Calling this method will replace any existing
	 * customizers.
	 * @param customizers the customizers to set
	 */
	public void setDeploymentInfoCustomizers(
			Collection<? extends UndertowDeploymentInfoCustomizer> customizers) {
		Assert.notNull(customizers, "Customizers must not be null");
		this.deploymentInfoCustomizers = new ArrayList<>(customizers);
	}

	/**
	 * Returns a mutable collection of the {@link UndertowDeploymentInfoCustomizer}s that
	 * will be applied to the Undertow {@link DeploymentInfo}.
	 * @return the customizers that will be applied
	 */
	public Collection<UndertowDeploymentInfoCustomizer> getDeploymentInfoCustomizers() {
		return this.deploymentInfoCustomizers;
	}

	@Override
	public void addDeploymentInfoCustomizers(
			UndertowDeploymentInfoCustomizer... customizers) {
		Assert.notNull(customizers, "UndertowDeploymentInfoCustomizers must not be null");
		this.deploymentInfoCustomizers.addAll(Arrays.asList(customizers));
	}

	@Override
	public void setAccessLogDirectory(File accessLogDirectory) {
		this.accessLogDirectory = accessLogDirectory;
	}

	@Override
	public void setAccessLogPattern(String accessLogPattern) {
		this.accessLogPattern = accessLogPattern;
	}

	@Override
	public void setAccessLogPrefix(String accessLogPrefix) {
		this.accessLogPrefix = accessLogPrefix;
	}

	@Override
	public void setAccessLogSuffix(String accessLogSuffix) {
		this.accessLogSuffix = accessLogSuffix;
	}

	public boolean isAccessLogEnabled() {
		return this.accessLogEnabled;
	}

	@Override
	public void setAccessLogEnabled(boolean accessLogEnabled) {
		this.accessLogEnabled = accessLogEnabled;
	}

	@Override
	public void setAccessLogRotate(boolean accessLogRotate) {
		this.accessLogRotate = accessLogRotate;
	}

	protected final boolean isUseForwardHeaders() {
		return this.useForwardHeaders;
	}

	@Override
	public void setUseForwardHeaders(boolean useForwardHeaders) {
		this.useForwardHeaders = useForwardHeaders;
	}

	@Override
	public void setBufferSize(Integer bufferSize) {
		this.bufferSize = bufferSize;
	}

	@Override
	public void setIoThreads(Integer ioThreads) {
		this.ioThreads = ioThreads;
	}

	@Override
	public void setWorkerThreads(Integer workerThreads) {
		this.workerThreads = workerThreads;
	}

	@Override
	public void setUseDirectBuffers(Boolean directBuffers) {
		this.directBuffers = directBuffers;
	}

	/**
	 * Set {@link UndertowBuilderCustomizer}s that should be applied to the Undertow
	 * {@link io.undertow.Undertow.Builder Builder}. Calling this method will replace any
	 * existing customizers.
	 * @param customizers the customizers to set
	 */
	public void setBuilderCustomizers(
			Collection<? extends UndertowBuilderCustomizer> customizers) {
		Assert.notNull(customizers, "Customizers must not be null");
		this.builderCustomizers = new ArrayList<>(customizers);
	}

	/**
	 * Returns a mutable collection of the {@link UndertowBuilderCustomizer}s that will be
	 * applied to the Undertow {@link io.undertow.Undertow.Builder Builder}.
	 * @return the customizers that will be applied
	 */
	public Collection<UndertowBuilderCustomizer> getBuilderCustomizers() {
		return this.builderCustomizers;
	}

	/**
	 * Add {@link UndertowBuilderCustomizer}s that should be used to customize the
	 * Undertow {@link io.undertow.Undertow.Builder Builder}.
	 * @param customizers the customizers to add
	 */
	@Override
	public void addBuilderCustomizers(UndertowBuilderCustomizer... customizers) {
		Assert.notNull(customizers, "Customizers must not be null");
		this.builderCustomizers.addAll(Arrays.asList(customizers));
	}

}
package org.jdal.ui.bind;

import java.beans.PropertyDescriptor;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jdal.beans.PropertyUtils;
import org.jdal.ui.Binder;
import org.jdal.ui.ModelHolder;
import org.springframework.beans.BeanWrapper;
import org.springframework.beans.PropertyAccessor;
import org.springframework.beans.PropertyAccessorFactory;
import org.springframework.validation.BeanPropertyBindingResult;
import org.springframework.validation.BindingResult;

/**
 * Composite Binder groups property binders.
 *
 * @author Jose Luis Martin - (jlm@joseluismartin.info)
 * @param <T> default model to use when binding properties
 */
@SuppressWarnings("unchecked")
public class CompositeBinder<T> implements Binder<T>, BinderHolder, ModelHolder<T>, Serializable {

	private static final Log log = LogFactory.getLog(CompositeBinder.class);

	/** Factory used to look up a PropertyBinder for a given component class. */
	private BinderFactory binderFactory;

	/** Child binders keyed by the property name they are bound to. */
	private Map<String, Binder<T>> binders = new HashMap<String, Binder<T>>();

	/** Default model to bind on for property binders */
	private T model;

	/** Binding result, lazily aggregated from child binders; reset on update(). */
	private BindingResult bindingResult;

	/** Property names that autobind() must not bind. */
	private List<String> ignoredProperties = new ArrayList<String>();

	/**
	 * Create new CompositeBinder
	 */
	public CompositeBinder() {
	}

	/**
	 * @param model model to bind on
	 */
	public CompositeBinder(T model) {
		this.model = model;
	}

	/**
	 * Bind a component to a property, read/write.
	 * @param component ui component
	 * @param propertyName property to bind to
	 */
	public void bind(Object component, String propertyName) {
		bind(component, propertyName, false);
	}

	/**
	 * Bind a component to a property using this CompositeBinder as model holder.
	 * @param component ui component
	 * @param propertyName property to bind to
	 * @param readOnly true for one-way (model to view) binding
	 */
	public void bind(Object component, String propertyName, boolean readOnly) {
		// Pass this composite as the model source so child binders resolve the
		// model through the ModelHolder interface.
		bind(component, propertyName, this, readOnly);
	}

	/**
	 * Bind a component to a property of the given model, delegating to the
	 * binder that the factory declares for the component class.
	 * @param component ui component
	 * @param propertyName property to bind to
	 * @param model model object to bind on
	 * @param readOnly true for one-way (model to view) binding
	 */
	public void bind(Object component, String propertyName, Object model, boolean readOnly) {
		PropertyBinder binder = binderFactory.getBinder(component.getClass());
		// Components with no registered binder are silently ignored.
		if (binder != null) {
			binder.bind(component, propertyName, model, readOnly);
			addBinder(propertyName, binder);
		}
	}

	/**
	 * Register a property binder under the given property name.
	 */
	public void addBinder(String propertyName, PropertyBinder binder) {
		binders.put(propertyName, (Binder<T>) binder);
	}

	/** Refresh all child binders (model to view). */
	public void refresh() {
		for (Binder<?> b : binders.values())
			b.refresh();
	}

	/**
	 * Register a binder under the given name.
	 */
	public void addBinder(Binder<?> binder, String name) {
		binders.put(name, (Binder<T>) binder);
	}

	/** Update the model from all child binders (view to model). */
	public void update() {
		// Invalidate the cached aggregate result; children produce fresh ones.
		bindingResult = null;

		for (Binder<?> b : binders.values())
			b.update();
	}

	/**
	 * Look up the binder for a property name, descending into nested binder
	 * holders for dotted paths (e.g. "address.street").
	 * @return the binder, or null if none is registered for the path
	 */
	public PropertyBinder getBinder(String propertyName) {
		PropertyBinder binder = (PropertyBinder) binders.get(propertyName);

		if (binder != null)
			return binder;

		if (PropertyUtils.isNested(propertyName)) {
			BinderHolder binderHolder = (BinderHolder) binders.get(PropertyUtils.getFirstPropertyName(propertyName));
			return binderHolder != null ? binderHolder.getBinder(PropertyUtils.getNestedPath(propertyName)) : null;
		}

		return null;
	}

	/** @return names of all bound properties */
	public Set<String> getPropertyNames() {
		return binders.keySet();
	}

	/** @return all registered property binders */
	public Collection<Binder<T>> getPropertyBinders() {
		return binders.values();
	}

	/**
	 * @return the binderFactory
	 */
	public BinderFactory getBinderFactory() {
		return binderFactory;
	}

	/**
	 * @param binderFactory the binderFactory to set
	 */
	public void setBinderFactory(BinderFactory binderFactory) {
		this.binderFactory = binderFactory;
	}

	public T getModel() {
		return model;
	}

	public void setModel(T model) {
		this.model = model;
	}

	/**
	 * {@inheritDoc}
	 */
	public BindingResult getBindingResult() {
		if (getModel() == null)
			return null;

		if (bindingResult == null) {
			createBindingResult();
			// Merge errors of children that report on the same object name.
			for (Binder<?> b : binders.values()) {
				if (b.getBindingResult() != null &&
						bindingResult.getObjectName().equals(b.getBindingResult().getObjectName()))
					bindingResult.addAllErrors(b.getBindingResult());
			}
		}

		return bindingResult;
	}

	// Creates the aggregate result named after the model's simple class name.
	private void createBindingResult() {
		bindingResult = new BeanPropertyBindingResult(getModel(),
				getModel().getClass().getSimpleName());
	}

	/**
	 * Bind, by matching names, every model property to a same-named field of the
	 * given view object (unless the property is in the ignored list).
	 * @param view view object whose fields hold the ui components
	 */
	public void autobind(Object view) {
		BeanWrapper bw = PropertyAccessorFactory.forBeanPropertyAccess(getModel());
		// FIX: the original used `new DirectFieldAccessor(view)` without importing
		// org.springframework.beans.DirectFieldAccessor; the already-imported
		// factory method returns an equivalent field accessor.
		PropertyAccessor viewPropertyAccessor = PropertyAccessorFactory.forDirectFieldAccess(view);

		// iterate on model properties
		for (PropertyDescriptor pd : bw.getPropertyDescriptors()) {
			String propertyName = pd.getName();
			if (!ignoredProperties.contains(propertyName)
					&& viewPropertyAccessor.isReadableProperty(propertyName)) {
				Object control = viewPropertyAccessor.getPropertyValue(propertyName);
				if (control != null) {
					if (log.isDebugEnabled())
						log.debug("Found control: " + control.getClass().getSimpleName() +
								" for property: " + propertyName);
					bind(control, propertyName);
				}
			}
		}
	}

	/**
	 * @return the ignoredProperties
	 */
	public List<String> getIgnoredProperties() {
		return ignoredProperties;
	}

	/**
	 * @param ignoredProperties the ignoredProperties to set
	 */
	public void setIgnoredProperties(List<String> ignoredProperties) {
		this.ignoredProperties = ignoredProperties;
	}

	/**
	 * Add a property name to ignore on binding.
	 * @param propertyName property name to ignore
	 */
	public void ignoreProperty(String propertyName) {
		ignoredProperties.add(propertyName);
	}

}
package mil.navy.spawar.swif.security.filters;

import static org.junit.Assert.*;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.Collection;
import java.util.Map;

import mil.navy.spawar.swif.security.SwifUserDetailsImpl;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.AuthorizationServiceException;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import com.mongodb.util.JSON;

/**
 * Tests for {@link ClassificationMongoRecordFilter}: verifies that records are
 * filtered against the user's classification attributes and that missing
 * labels/attributes raise {@link AuthorizationServiceException}.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations={"classpath:classification-module-test-config.xml"})
public class ClassificationMongoRecordFilterTests {

	private static final String EXPECTED_DB_LABEL = "securityLabel";
	private static final String EXPECTED_DB_ATTR = "classification";
	private static final String EXPECTED_USERDETAIL_ATTR = "classification";

	@Autowired(required=true)
	private ClassificationMongoRecordFilter filter;

	@Autowired(required=true)
	private SwifUserDetailsImpl userDetails;

	// Five labeled sample records loaded from the classpath before each test.
	private BasicDBList inputData;

	// Live view of the user's custom attributes; tests mutate it to vary access.
	private Map<String, Collection<String>> userAttrs;

	@Before
	public void setUp() {
		// check filter config
		assertNotNull(filter);
		assertTrue(filter instanceof ClassificationMongoRecordFilter);
		assertEquals(EXPECTED_DB_LABEL, filter.getDatabaseLabel());
		assertEquals(EXPECTED_DB_ATTR, filter.getAttributeConfig().getDbAttributeName());
		assertEquals(EXPECTED_USERDETAIL_ATTR, filter.getAttributeConfig().getUserDetailsName());

		// check userDetail config
		assertNotNull(userDetails);
		assertTrue(userDetails instanceof SwifUserDetailsImpl);
		assertNotNull(userDetails.getCustomAttributes());
		assertTrue(userDetails.getCustomAttributes().containsKey(EXPECTED_USERDETAIL_ATTR));
		userAttrs = userDetails.getCustomAttributes();

		// load input data
		inputData = loadJson("/classification-module-test-data.json");
		assertNotNull(inputData);
		assertTrue(inputData instanceof BasicDBList);
		assertEquals(5, ((BasicDBList) inputData).size());
	}

	@Test
	@DirtiesContext
	public void testCanSeeEveryThing() {
		// set user so they have all access
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).clear();
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).add("LEVEL-1");
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).add("LEVEL-2");
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).add("LEVEL-3");
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).add("LEVEL-4");
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).add("LEVEL-5");

		// do filter
		BasicDBList result = filter.filter(inputData, userDetails);

		// check post-conditions
		assertNotNull(result);
		assertEquals(5, result.size());
	}

	@Test
	@DirtiesContext
	public void testCanNotSeeAnyThing() {
		// make user have no access (LEVEL-0 matches no record label)
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).clear();
		assertEquals(0, userAttrs.get(EXPECTED_USERDETAIL_ATTR).size());
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).add("LEVEL-0");

		// do filter
		BasicDBList result = filter.filter(inputData, userDetails);

		// check post-conditions
		assertNotNull(result);
		assertEquals(0, result.size());
	}

	@Test
	@DirtiesContext
	public void testUserCanSeeSomeRecords() {
		DBObject rec = null;
		BasicDBList result = null;

		// set user so they have some access
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).clear();
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).add("LEVEL-1");
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).add("LEVEL-2");

		// do filter
		result = filter.filter(inputData, userDetails);

		// check post-conditions
		assertNotNull(result);
		assertEquals(2, result.size());

		// check correct rec return
		assertNotNull(result.get(0));
		assertTrue(result.get(0) instanceof DBObject);
		rec = (DBObject) result.get(0);
		assertTrue(rec.containsField("name"));
		assertEquals("doc-1", rec.get("name"));

		// check correct rec return
		assertNotNull(result.get(1));
		assertTrue(result.get(1) instanceof DBObject);
		rec = (DBObject) result.get(1);
		assertTrue(rec.containsField("name"));
		assertEquals("doc-2", rec.get("name"));

		// remove some access from user
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).clear();
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).add("LEVEL-1");

		// do filter
		result = filter.filter(inputData, userDetails);

		// check post-conditions
		assertNotNull(result);
		assertEquals(1, result.size());

		// check correct rec return
		assertNotNull(result.get(0));
		assertTrue(result.get(0) instanceof DBObject);
		rec = (DBObject) result.get(0);
		assertTrue(rec.containsField("name"));
		assertEquals("doc-1", rec.get("name"));
	}

	@Test(expected=AuthorizationServiceException.class)
	public void testDataIsNotLabeled() {
		// remove markings from data
		for (int i = 0; i < 5; i++) {
			assertTrue(inputData.get(i) instanceof BasicDBObject);
			BasicDBObject rec = (BasicDBObject) inputData.get(i);
			assertTrue(rec.containsField(EXPECTED_DB_LABEL));
			Object securityContextObj = rec.get(EXPECTED_DB_LABEL);
			assertTrue(securityContextObj instanceof BasicDBObject);
			BasicDBObject securityContext = (BasicDBObject) securityContextObj;
			assertTrue(securityContext.containsField(EXPECTED_DB_ATTR));
			securityContext.removeField(EXPECTED_DB_ATTR);
			assertFalse(securityContext.containsField(EXPECTED_DB_ATTR));
		}

		// do filter & expect error
		filter.filter(inputData, userDetails);
	}

	@Test(expected=AuthorizationServiceException.class)
	@DirtiesContext
	public void testUserHasNoClassificationMarking() {
		// make user have no access
		userAttrs.get(EXPECTED_USERDETAIL_ATTR).clear();
		assertEquals(0, userAttrs.get(EXPECTED_USERDETAIL_ATTR).size());

		// do filter & expect error
		filter.filter(inputData, userDetails);
	}

	@Test(expected=AuthorizationServiceException.class)
	@DirtiesContext
	public void testUserHasNoClassificationAttribute() {
		// make user have no attr
		userAttrs.clear();
		assertFalse(userAttrs.containsKey(EXPECTED_USERDETAIL_ATTR));

		// do filter & expect error
		filter.filter(inputData, userDetails);
	}

	// Parses the given classpath resource as a JSON array of records.
	private BasicDBList loadJson(String resource) {
		String jsonData = loadResourceAsString(resource);
		assertNotNull(jsonData);
		Object jsonObjects = JSON.parse(jsonData);
		assertNotNull(jsonObjects);
		assertTrue(jsonObjects instanceof BasicDBList);
		BasicDBList result = (BasicDBList) jsonObjects;
		return result;
	}

	/**
	 * Reads a classpath resource into a single string (line breaks dropped).
	 * Returns null when the resource is missing or unreadable, matching the
	 * original contract.
	 *
	 * FIX: the original leaked the InputStream/readers on every call and relied
	 * on an NPE (swallowed by the catch) when the resource was absent; streams
	 * are now closed via try-with-resources and absence is checked explicitly.
	 */
	private String loadResourceAsString(String resource) {
		try (InputStream is = this.getClass().getResourceAsStream(resource)) {
			if (is == null) {
				return null;
			}
			try (BufferedReader br = new BufferedReader(new InputStreamReader(is))) {
				StringBuilder result = new StringBuilder();
				String txtLine;
				while ((txtLine = br.readLine()) != null) {
					result.append(txtLine);
				}
				return result.toString();
			}
		} catch (Exception ex) {
			return null;
		}
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.cloudera.recordservice.examples.terasort;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.math.BigInteger;
import java.util.zip.Checksum;

import org.apache.hadoop.util.PureJavaCrc32;

/**
 * A single process data generator for the terasort data. Based on gensort.c
 * version 1.1 (3 Mar 2009) from Chris Nyberg <chris.nyberg@ordinal.com>.
 */
public class GenSort {

  // Record delimiters that must never appear inside a generated binary record.
  static final byte DELIM1 = '\n';
  static final byte DELIM2 = '\r';
  static final byte DELIM3 = (byte)0x01;
  // Byte substituted for any delimiter found in the random payload.
  static final byte REPLACE = (byte)0xFF;

  /**
   * Generate a "binary" record suitable for all sort benchmarks *except*
   * PennySort. Layout of the 100-byte record: bytes 0-9 key, 10-11 break,
   * 12-43 record number in hex, 44-47 break, 48-95 random filler, 96-99 break.
   */
  static void generateRecord(byte[] recBuf, Unsigned16 rand,
                             Unsigned16 recordNumber) {
    /* generate the 10-byte key using the high 10 bytes of the 128-bit
     * random number */
    for(int i=0; i < 10; ++i) {
      recBuf[i] = rand.getByte(i);
    }

    /* add 2 bytes of "break" */
    recBuf[10] = 0x00;
    recBuf[11] = 0x11;

    /* convert the 128-bit record number to 32 bits of ascii hexadecimal
     * as the next 32 bytes of the record. */
    for (int i = 0; i < 32; i++) {
      recBuf[12 + i] = (byte) recordNumber.getHexDigit(i);
    }

    /* add 4 bytes of "break" data */
    recBuf[44] = (byte) 0x88;
    recBuf[45] = (byte) 0x99;
    recBuf[46] = (byte) 0xAA;
    recBuf[47] = (byte) 0xBB;

    /* add 48 bytes of filler based on low 48 bits of random number */
    for(int i=0; i < 12; ++i) {
      recBuf[48+i*4] = recBuf[49+i*4] = recBuf[50+i*4] = recBuf[51+i*4] =
          (byte) rand.getHexDigit(20 + i);
    }

    /* add 4 bytes of "break" data */
    recBuf[96] = (byte) 0xCC;
    recBuf[97] = (byte) 0xDD;
    recBuf[98] = (byte) 0xEE;
    recBuf[99] = (byte) 0xFF;

    /* FIX(doc): the original comment also claimed the last character is set to
     * a delimiter, which the code has never done; only the replacement below
     * is performed. Replace all delimiter bytes so records stay delimiter-free. */
    for (int i = 0; i < 100; ++i) {
      if (recBuf[i] == DELIM1 || recBuf[i] == DELIM2 || recBuf[i] == DELIM3) {
        recBuf[i] = REPLACE;
      }
    }
  }

  // Converts a long to an unsigned BigInteger (big-endian byte order).
  private static BigInteger makeBigInteger(long x) {
    byte[] data = new byte[8];
    for(int i=0; i < 8; ++i) {
      data[i] = (byte) (x >>> (56 - 8*i));
    }
    return new BigInteger(1, data);
  }

  private static final BigInteger NINETY_FIVE = new BigInteger("95");

  /**
   * Generate an ascii record suitable for all sort benchmarks including
   * PennySort. Key bytes are printable characters in the range ' '..' '+94.
   */
  static void generateAsciiRecord(byte[] recBuf, Unsigned16 rand,
                                  Unsigned16 recordNumber) {
    /* generate the 10-byte ascii key using mostly the high 64 bits. */
    long temp = rand.getHigh8();
    if (temp < 0) {
      // use biginteger to avoid the negative sign problem
      BigInteger bigTemp = makeBigInteger(temp);
      recBuf[0] = (byte) (' ' + (bigTemp.mod(NINETY_FIVE).longValue()));
      temp = bigTemp.divide(NINETY_FIVE).longValue();
    } else {
      recBuf[0] = (byte) (' ' + (temp % 95));
      temp /= 95;
    }
    for(int i=1; i < 8; ++i) {
      recBuf[i] = (byte) (' ' + (temp % 95));
      temp /= 95;
    }
    temp = rand.getLow8();
    if (temp < 0) {
      BigInteger bigTemp = makeBigInteger(temp);
      recBuf[8] = (byte) (' ' + (bigTemp.mod(NINETY_FIVE).longValue()));
      temp = bigTemp.divide(NINETY_FIVE).longValue();
    } else {
      recBuf[8] = (byte) (' ' + (temp % 95));
      temp /= 95;
    }
    recBuf[9] = (byte)(' ' + (temp % 95));

    /* add 2 bytes of "break" */
    recBuf[10] = ' ';
    recBuf[11] = ' ';

    /* convert the 128-bit record number to 32 bits of ascii hexadecimal
     * as the next 32 bytes of the record. */
    for (int i = 0; i < 32; i++) {
      recBuf[12 + i] = (byte) recordNumber.getHexDigit(i);
    }

    /* add 2 bytes of "break" data */
    recBuf[44] = ' ';
    recBuf[45] = ' ';

    /* add 52 bytes of filler based on low 48 bits of random number */
    for(int i=0; i < 13; ++i) {
      recBuf[46+i*4] = recBuf[47+i*4] = recBuf[48+i*4] = recBuf[49+i*4] =
          (byte) rand.getHexDigit(19 + i);
    }

    /* add 2 bytes of "break" data */
    recBuf[98] = '\r'; /* nice for Windows */
    recBuf[99] = '\n';
  }

  // Prints command-line usage and exits with status 1.
  private static void usage() {
    PrintStream out = System.out;
    out.println("usage: gensort [-a] [-c] [-bSTARTING_REC_NUM] NUM_RECS FILE_NAME");
    out.println("-a        Generate ascii records required for PennySort or JouleSort.");
    out.println("          These records are also an alternative input for the other");
    out.println("          sort benchmarks.  Without this flag, binary records will be");
    out.println("          generated that contain the highest density of randomness in");
    out.println("          the 10-byte key.");
    out.println("-c        Calculate the sum of the crc32 checksums of each of the");
    out.println("          generated records and send it to standard error.");
    out.println("-bN       Set the beginning record generated to N. By default the");
    out.println("          first record generated is record 0.");
    out.println("NUM_RECS  The number of sequential records to generate.");
    out.println("FILE_NAME The name of the file to write the records to.\n");
    out.println("Example 1 - to generate 1000000 ascii records starting at record 0 to");
    out.println("the file named \"pennyinput\":");
    out.println("    gensort -a 1000000 pennyinput\n");
    out.println("Example 2 - to generate 1000 binary records beginning with record 2000");
    out.println("to the file named \"partition2\":");
    out.println("    gensort -b2000 1000 partition2");
    System.exit(1);
  }

  /**
   * Generate and write {@code recordsToGenerate} sequential records starting at
   * {@code firstRecordNumber}. When {@code checksum} is non-null, the CRC32 of
   * every record is accumulated into it.
   * @throws IOException if writing a record fails
   */
  public static void outputRecords(OutputStream out,
                                   boolean useAscii,
                                   Unsigned16 firstRecordNumber,
                                   Unsigned16 recordsToGenerate,
                                   Unsigned16 checksum
                                   ) throws IOException {
    byte[] row = new byte[100];
    Unsigned16 recordNumber = new Unsigned16(firstRecordNumber);
    Unsigned16 lastRecordNumber = new Unsigned16(firstRecordNumber);
    Checksum crc = new PureJavaCrc32();
    Unsigned16 tmp = new Unsigned16();
    lastRecordNumber.add(recordsToGenerate);
    Unsigned16 ONE = new Unsigned16(1);
    // Seed the 128-bit generator at the position of the first record.
    Unsigned16 rand = Random16.skipAhead(firstRecordNumber);
    while (!recordNumber.equals(lastRecordNumber)) {
      Random16.nextRand(rand);
      if (useAscii) {
        generateAsciiRecord(row, rand, recordNumber);
      } else {
        generateRecord(row, rand, recordNumber);
      }
      if (checksum != null) {
        crc.reset();
        crc.update(row, 0, row.length);
        tmp.set(crc.getValue());
        checksum.add(tmp);
      }
      recordNumber.add(ONE);
      out.write(row);
    }
  }

  /**
   * Command-line entry point: parses flags ({@code -a}, {@code -c},
   * {@code -bN}), then generates NUM_RECS records into FILE_NAME.
   */
  public static void main(String[] args) throws Exception {
    Unsigned16 startingRecord = new Unsigned16();
    Unsigned16 numberOfRecords;
    boolean useAscii = false;
    Unsigned16 checksum = null;

    int i;
    for(i=0; i < args.length; ++i) {
      String arg = args[i];
      int argLength = arg.length();
      if (argLength >= 1 && arg.charAt(0) == '-') {
        if (argLength < 2) {
          usage();
        }
        switch (arg.charAt(1)) {
        case 'a':
          useAscii = true;
          break;
        case 'b':
          startingRecord = Unsigned16.fromDecimal(arg.substring(2));
          break;
        case 'c':
          checksum = new Unsigned16();
          break;
        default:
          usage();
        }
      } else {
        break;
      }
    }
    if (args.length - i != 2) {
      usage();
    }
    numberOfRecords = Unsigned16.fromDecimal(args[i]);
    // FIX: try-with-resources so the output file is closed even when
    // outputRecords throws (the original leaked the stream on failure).
    try (OutputStream out = new FileOutputStream(args[i+1])) {
      outputRecords(out, useAscii, startingRecord, numberOfRecords, checksum);
    }
    if (checksum != null) {
      System.out.println(checksum);
    }
  }
}
// Tile entity for the OpenBlocks "golden egg" block: a server-driven state
// machine that spins faster and faster, floats upward while (optionally)
// grabbing nearby blocks, drops them, explodes, and finally spawns a
// mini-me entity for the player who placed the egg.
package openblocks.common.tileentity;

import java.util.List;
import java.util.Random;
import java.util.UUID;

import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTUtil;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.world.WorldServer;
import net.minecraftforge.common.util.Constants;

import openblocks.Config;
import openblocks.common.MagnetWhitelists;
import openblocks.common.entity.EntityMiniMe;
import openmods.Log;
import openmods.api.IBreakAwareTile;
import openmods.api.IPlacerAwareTile;
import openmods.entity.EntityBlock;
import openmods.fakeplayer.FakePlayerPool;
import openmods.fakeplayer.FakePlayerPool.PlayerUser;
import openmods.fakeplayer.OpenModsFakePlayer;
import openmods.sync.SyncableEnum;
import openmods.tileentity.SyncedTileEntity;

import com.google.common.base.Objects;
import com.google.common.collect.Lists;
import com.mojang.authlib.GameProfile;

import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;

public class TileEntityGoldenEgg extends SyncedTileEntity implements IPlacerAwareTile, IBreakAwareTile {

    // Exponential-smoothing factor used client-side to ease rotation/rise
    // speed toward the current state's target speed.
    private static final float SPEED_CHANGE_RATE = 0.1f;
    private static final Random RANDOM = new Random();
    // A state-advance roll is attempted every STAGE_CHANGE_TICK server ticks.
    private static final int STAGE_CHANGE_TICK = 100;
    // Ticks spent rising (FLOATING) and falling (FALLING), respectively.
    private static final int RISING_TIME = 400;
    private static final int FALLING_TIME = 10;
    // Maximum rise height in blocks; progress (0..1) is scaled by this.
    public static final int MAX_HEIGHT = 5;
    // Probability that a due stage-change roll actually advances the state.
    private static final double STAGE_CHANGE_CHANCE = 0.8;
    // Fallback profile used for the spawned mini-me when the placer is unknown.
    private static final GameProfile MR_GLITCH = new GameProfile(UUID.fromString("d4d119aa-d410-488a-8734-0053577d4a1a"), null);

    /**
     * Egg life-cycle states. Each value carries a target rotation speed, a
     * progress (rise/fall) speed per tick, and whether client special
     * effects should play. INERT -> ROTATING_SLOW -> ROTATING_MEDIUM ->
     * ROTATING_FAST -> FLOATING -> FALLING -> EXPLODING.
     */
    public static enum State {
        INERT(0, 0, false) {
            @Override
            public State getNextState(TileEntityGoldenEgg target) {
                return target.tryRandomlyChangeState(STAGE_CHANGE_TICK, ROTATING_SLOW);
            }

            @Override
            public void onServerTick(TileEntityGoldenEgg target, WorldServer world) {
                target.tickCounter++;
            }
        },
        ROTATING_SLOW(1, 0, false) {
            @Override
            public State getNextState(TileEntityGoldenEgg target) {
                return target.tryRandomlyChangeState(STAGE_CHANGE_TICK, ROTATING_MEDIUM);
            }

            @Override
            public void onServerTick(TileEntityGoldenEgg target, WorldServer world) {
                target.tickCounter++;
            }
        },
        ROTATING_MEDIUM(10, 0, false) {
            @Override
            public State getNextState(TileEntityGoldenEgg target) {
                return target.tryRandomlyChangeState(STAGE_CHANGE_TICK, ROTATING_FAST);
            }

            @Override
            public void onServerTick(TileEntityGoldenEgg target, WorldServer world) {
                target.tickCounter++;
            }
        },
        ROTATING_FAST(50, 0, false) {
            @Override
            public State getNextState(TileEntityGoldenEgg target) {
                return target.tryRandomlyChangeState(STAGE_CHANGE_TICK, FLOATING);
            }

            @Override
            public void onServerTick(TileEntityGoldenEgg target, WorldServer world) {
                target.tickCounter++;
            }
        },
        FLOATING(100, 1.0f / RISING_TIME, true) {
            @Override
            public void onEntry(TileEntityGoldenEgg target) {
                // From here on tickCounter counts DOWN to the next transition.
                target.tickCounter = RISING_TIME;
            }

            @Override
            public void onServerTick(TileEntityGoldenEgg target, WorldServer world) {
                target.tickCounter--;
                // Occasionally pick up a whitelisted block in a 20x2x20 area
                // around the egg (config-gated).
                if (Config.eggCanPickBlocks && RANDOM.nextInt(6) == 0) {
                    int posX = target.xCoord + RANDOM.nextInt(20) - 10;
                    int posY = target.yCoord + RANDOM.nextInt(2) - 1;
                    int posZ = target.zCoord + RANDOM.nextInt(20) - 10;
                    boolean canMove = MagnetWhitelists.instance.testBlock(target.worldObj, posX, posY, posZ);
                    if (canMove) target.pickUpBlock(world, posX, posY, posZ);
                }
            }

            @Override
            public State getNextState(TileEntityGoldenEgg target) {
                return (target.tickCounter <= 0)? FALLING : null;
            }
        },
        FALLING(150, -1.0f / FALLING_TIME, true) {
            @Override
            public void onEntry(TileEntityGoldenEgg target) {
                target.tickCounter = FALLING_TIME;
                // Release any blocks the egg carried up.
                target.dropBlocks();
            }

            @Override
            public void onServerTick(TileEntityGoldenEgg target, WorldServer world) {
                target.tickCounter--;
            }

            @Override
            public State getNextState(TileEntityGoldenEgg target) {
                return (target.tickCounter <= 0)? EXPLODING : null;
            }
        },
        EXPLODING(666, 0, true) {
            @Override
            public void onEntry(TileEntityGoldenEgg target) {
                target.explode();
            }

            @Override
            public State getNextState(TileEntityGoldenEgg target) {
                // Terminal state: the block is gone after explode().
                return null;
            }
        };

        public final float rotationSpeed;
        public final float progressSpeed;
        public final boolean specialEffects;

        // Hook called once when the egg enters this state (default: no-op).
        public void onEntry(TileEntityGoldenEgg target) {}

        // Per-tick server-side behavior for this state (default: no-op).
        public void onServerTick(TileEntityGoldenEgg target, WorldServer world) {}

        // Returns the state to transition to, or null to stay in this state.
        public abstract State getNextState(TileEntityGoldenEgg target);

        private State(float rotationSpeed, float riseSpeed, boolean specialEffects) {
            this.rotationSpeed = rotationSpeed;
            this.progressSpeed = riseSpeed;
            this.specialEffects = specialEffects;
        }
    }

    // Counts up in the rotating states, down in FLOATING/FALLING.
    public int tickCounter;
    // Client-side interpolated animation values.
    private float rotation;
    private float progress;
    private float rotationSpeed;
    private float progressSpeed;
    // Blocks currently being levitated by the egg (server-side only).
    private List<EntityBlock> blocks = Lists.newArrayList();
    // Current state, synced from server to client.
    private SyncableEnum<State> stage;
    // Profile of the player who placed the egg; may be null.
    private GameProfile owner;

    public float getRotation(float partialTickTime) {
        return rotation + rotationSpeed * partialTickTime;
    }

    public float getProgress(float partialTickTime) {
        return progress + progressSpeed * partialTickTime;
    }

    // Vertical render offset in blocks, derived from progress.
    public float getOffset(float partialTickTime) {
        return getProgress(partialTickTime) * MAX_HEIGHT;
    }

    /**
     * Rolls for a state change: only on every {@code delay}-th tick, and
     * then only with STAGE_CHANGE_CHANCE probability. Returns the next
     * state, or null to remain in the current state.
     */
    public State tryRandomlyChangeState(int delay, State nextState) {
        return (tickCounter % delay == 0) && (RANDOM.nextDouble() < STAGE_CHANGE_CHANCE)? nextState : null;
    }

    @Override
    protected void createSyncedFields() {
        stage = SyncableEnum.create(State.INERT);
    }

    // Converts the block at (x,y,z) into a floating EntityBlock via a fake
    // player (so normal block-break hooks apply) and tracks it in 'blocks'.
    private void pickUpBlock(final WorldServer world, final int x, final int y, final int z) {
        FakePlayerPool.instance.executeOnPlayer(world, new PlayerUser() {
            @Override
            public void usePlayer(OpenModsFakePlayer fakePlayer) {
                EntityBlock block = EntityBlock.create(fakePlayer, worldObj, x, y, z);
                if (block != null) {
                    block.setHasAirResistance(false);
                    block.setHasGravity(false);
                    block.motionY = 0.1;
                    blocks.add(block);
                    world.spawnEntityInWorld(block);
                }
            }
        });
    }

    // Re-enables gravity on all carried blocks and lets them fall.
    private void dropBlocks() {
        for (EntityBlock block : blocks) {
            block.motionY = -0.9;
            block.setHasGravity(true);
        }
        blocks.clear();
    }

    // Removes the egg block, detonates, and spawns the mini-me using the
    // placer's profile (or MR_GLITCH when unknown).
    private void explode() {
        worldObj.setBlockToAir(xCoord, yCoord, zCoord);
        worldObj.createExplosion(null, 0.5 + xCoord, 0.5 + yCoord, 0.5 + zCoord, 2, true);
        EntityMiniMe miniMe = new EntityMiniMe(worldObj, Objects.firstNonNull(owner, MR_GLITCH));
        miniMe.setPositionAndRotation(xCoord + 0.5, yCoord + 0.5, zCoord + 0.5, 0, 0);
        worldObj.spawnEntityInWorld(miniMe);
    }

    public State getState() {
        return stage.get();
    }

    @Override
    public void updateEntity() {
        super.updateEntity();
        State state = getState();
        if (worldObj.isRemote) {
            // Client: ease animation speeds toward the state targets.
            rotationSpeed = (1 - SPEED_CHANGE_RATE) * rotationSpeed + SPEED_CHANGE_RATE * state.rotationSpeed;
            rotation += rotationSpeed;
            progressSpeed = (1 - SPEED_CHANGE_RATE) * progressSpeed + SPEED_CHANGE_RATE * state.progressSpeed;
            progress += progressSpeed;
        } else {
            // Server: tick the state machine and sync transitions to clients.
            if (worldObj instanceof WorldServer) state.onServerTick(this, (WorldServer)worldObj);
            State nextState = state.getNextState(this);
            if (nextState != null) {
                stage.set(nextState);
                nextState.onEntry(this);
                sync();
            }
        }
    }

    @Override
    public void writeToNBT(NBTTagCompound nbt) {
        super.writeToNBT(nbt);
        if (owner != null) {
            NBTTagCompound ownerTag = new NBTTagCompound();
            // Serializes the full GameProfile (id + name) under "Owner".
            NBTUtil.func_152460_a(ownerTag, owner);
            nbt.setTag("Owner", ownerTag);
        }
    }

    @Override
    public void readFromNBT(NBTTagCompound nbt) {
        super.readFromNBT(nbt);
        // Three formats, oldest first: legacy "owner" (player name string),
        // "OwnerUUID" (uuid string), and current "Owner" (profile compound).
        if (nbt.hasKey("owner", Constants.NBT.TAG_STRING)) {
            String ownerName = nbt.getString("owner");
            // Resolve the legacy name to a profile via the server's cache.
            this.owner = MinecraftServer.getServer().func_152358_ax().func_152655_a(ownerName);
        } else if (nbt.hasKey("OwnerUUID", Constants.NBT.TAG_STRING)) {
            final String uuidStr = nbt.getString("OwnerUUID");
            try {
                UUID uuid = UUID.fromString(uuidStr);
                this.owner = new GameProfile(uuid, null);
            } catch (IllegalArgumentException e) {
                // Corrupt UUID: log and fall through with owner == null.
                Log.warn(e, "Failed to parse UUID: %s", uuidStr);
            }
        } else if (nbt.hasKey("Owner", Constants.NBT.TAG_COMPOUND)) {
            this.owner = NBTUtil.func_152459_a(nbt.getCompoundTag("Owner"));
        }
    }

    @Override
    public void onBlockBroken() {
        // Don't strand levitated blocks if the egg is destroyed mid-float.
        dropBlocks();
    }

    @Override
    public void onBlockPlacedBy(EntityLivingBase placer, ItemStack stack) {
        if (!worldObj.isRemote && placer instanceof EntityPlayer) {
            this.owner = ((EntityPlayer)placer).getGameProfile();
        }
    }

    @Override
    @SideOnly(Side.CLIENT)
    public AxisAlignedBB getRenderBoundingBox() {
        // Full-height column so the egg still renders while floating above
        // its block position.
        return AxisAlignedBB.getBoundingBox(xCoord, -1024, zCoord, xCoord + 1, 1024, zCoord + 1);
    }
}
/*
 * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.siddhi.core.query.processor.stream.window;

import org.wso2.siddhi.annotation.Extension;
import org.wso2.siddhi.annotation.Parameter;
import org.wso2.siddhi.annotation.ReturnAttribute;
import org.wso2.siddhi.annotation.util.DataType;
import org.wso2.siddhi.core.config.ExecutionPlanContext;
import org.wso2.siddhi.core.event.ComplexEvent;
import org.wso2.siddhi.core.event.ComplexEventChunk;
import org.wso2.siddhi.core.event.state.StateEvent;
import org.wso2.siddhi.core.event.stream.StreamEvent;
import org.wso2.siddhi.core.event.stream.StreamEventCloner;
import org.wso2.siddhi.core.executor.ConstantExpressionExecutor;
import org.wso2.siddhi.core.executor.ExpressionExecutor;
import org.wso2.siddhi.core.executor.VariableExpressionExecutor;
import org.wso2.siddhi.core.query.processor.Processor;
import org.wso2.siddhi.core.query.processor.SchedulingProcessor;
import org.wso2.siddhi.core.table.EventTable;
import org.wso2.siddhi.core.util.Scheduler;
import org.wso2.siddhi.core.util.collection.operator.Finder;
import org.wso2.siddhi.core.util.collection.operator.MatchingMetaStateHolder;
import org.wso2.siddhi.core.util.parser.OperatorParser;
import org.wso2.siddhi.query.api.definition.Attribute;
import org.wso2.siddhi.query.api.exception.ExecutionPlanValidationException;
import org.wso2.siddhi.query.api.expression.Expression;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Extension(
        name = "externalTimeBatch",
        namespace = "",
        description = "A batch (tumbling) time window based on external time, that holds events arrived " +
                "during windowTime periods, and gets updated for every windowTime.",
        parameters = {
                @Parameter(name = "timestamp",
                        description = "The time which the window determines as current time and will act upon. " +
                                "The value of this parameter should be monotonically increasing.",
                        type = {DataType.LONG}),
                @Parameter(name = "windowTime",
                        description = "The batch time period for which the window should hold events.",
                        type = {DataType.INT, DataType.LONG, DataType.TIME}),
                @Parameter(name = "startTime",
                        description = "User defined start time. This could either be a constant (of type int, " +
                                "long or time) or an attribute of the corresponding stream (of type long). " +
                                "If an attribute is provided, initial value of attribute would be considered as " +
                                "startTime. When startTime is not given, initial value of timestamp " +
                                "is used as the default.",
                        type = {DataType.INT, DataType.LONG, DataType.TIME},
                        optional = true),
                @Parameter(name = "timeout",
                        description = "Time to wait for arrival of new event, before flushing " +
                                "and giving output for events belonging to a specific batch. If timeout is " +
                                "not provided, system waits till an event from next batch arrives to " +
                                "flush current batch.",
                        type = {DataType.INT, DataType.LONG, DataType.TIME},
                        optional = true)
        },
        returnAttributes = @ReturnAttribute(
                description = "Returns current and expired events.",
                type = {})
)
public class ExternalTimeBatchWindowProcessor extends WindowProcessor implements SchedulingProcessor, FindableProcessor {

    // Events belonging to the batch currently being collected.
    private ComplexEventChunk<StreamEvent> currentEventChunk = new ComplexEventChunk<StreamEvent>(false);
    // Events from the previous batch, kept only when expired output is
    // needed (outputExpectsExpiredEvents / timeout / find()).
    private ComplexEventChunk<StreamEvent> expiredEventChunk = null;
    // RESET event emitted ahead of each batch's current events.
    private StreamEvent resetEvent = null;
    // Executor reading the external timestamp attribute (1st parameter).
    private VariableExpressionExecutor timestampExpressionExecutor;
    // Non-null when startTime was given as a stream attribute, not a constant.
    private ExpressionExecutor startTimeAsVariable;
    // Batch length (windowTime) in external-time units.
    private long timeToKeep;
    // External-time end of the current batch; -1 until the first event arrives.
    private long endTime = -1;
    private long startTime = 0;
    private boolean isStartTimeEnabled = false;
    // Wall-clock timeout after which a batch is flushed without waiting for
    // the next batch's first event; 0 disables the scheduler.
    private long schedulerTimeout = 0;
    private Scheduler scheduler;
    private long lastScheduledTime;
    private long lastCurrentEventTime;
    // True when the current batch was already flushed by a timer, so a later
    // event for the same batch must append rather than re-flush.
    private boolean flushed = false;
    private boolean storeExpiredEvents = false;
    // 5th parameter: rewrite each event's timestamp attribute to the batch
    // end time before emitting.
    private boolean replaceTimestampWithBatchEndTime = false;

    /**
     * Validates and unpacks the 2-5 window parameters:
     * (timestamp, windowTime[, startTime[, timeout[, replaceTimestampWithBatchEndTime]]]).
     */
    @Override
    protected void init(ExpressionExecutor[] attributeExpressionExecutors, ExecutionPlanContext executionPlanContext) {
        if (outputExpectsExpiredEvents) {
            this.expiredEventChunk = new ComplexEventChunk<StreamEvent>(false);
            this.storeExpiredEvents = true;
        }
        if (attributeExpressionExecutors.length >= 2 && attributeExpressionExecutors.length <= 5) {
            // 1st parameter: must be a long-typed stream attribute.
            if (!(attributeExpressionExecutors[0] instanceof VariableExpressionExecutor)) {
                throw new ExecutionPlanValidationException("ExternalTime window's 1st parameter timestamp should be a variable, but found " + attributeExpressionExecutors[0].getClass());
            }
            if (attributeExpressionExecutors[0].getReturnType() != Attribute.Type.LONG) {
                throw new ExecutionPlanValidationException("ExternalTime window's 1st parameter timestamp should be type long, but found " + attributeExpressionExecutors[0].getReturnType());
            }
            timestampExpressionExecutor = (VariableExpressionExecutor) attributeExpressionExecutors[0];
            // 2nd parameter: windowTime constant (int or long).
            if (attributeExpressionExecutors[1].getReturnType() == Attribute.Type.INT) {
                timeToKeep = (Integer) ((ConstantExpressionExecutor) attributeExpressionExecutors[1]).getValue();
            } else if (attributeExpressionExecutors[1].getReturnType() == Attribute.Type.LONG) {
                timeToKeep = (Long) ((ConstantExpressionExecutor) attributeExpressionExecutors[1]).getValue();
            } else {
                throw new ExecutionPlanValidationException("ExternalTimeBatch window's 2nd parameter windowTime should be either int or long, but found " + attributeExpressionExecutors[1].getReturnType());
            }
            // 3rd parameter (optional): startTime, either a constant or a
            // long stream attribute resolved from the first event.
            if (attributeExpressionExecutors.length >= 3) {
                isStartTimeEnabled = true;
                if ((attributeExpressionExecutors[2] instanceof ConstantExpressionExecutor)) {
                    if (attributeExpressionExecutors[2].getReturnType() == Attribute.Type.INT) {
                        startTime = Integer.parseInt(String.valueOf(((ConstantExpressionExecutor) attributeExpressionExecutors[2]).getValue()));
                    } else if (attributeExpressionExecutors[2].getReturnType() == Attribute.Type.LONG) {
                        startTime = Long.parseLong(String.valueOf(((ConstantExpressionExecutor) attributeExpressionExecutors[2]).getValue()));
                    } else {
                        throw new ExecutionPlanValidationException("ExternalTimeBatch window's 3rd parameter startTime should either be a constant (of type int or long) or an attribute (of type long), but found " + attributeExpressionExecutors[2].getReturnType());
                    }
                } else if (attributeExpressionExecutors[2].getReturnType() != Attribute.Type.LONG) {
                    throw new ExecutionPlanValidationException("ExternalTimeBatch window's 3rd parameter startTime should either be a constant (of type int or long) or an attribute (of type long), but found " + attributeExpressionExecutors[2].getReturnType());
                } else {
                    startTimeAsVariable = attributeExpressionExecutors[2];
                }
            }
            // 4th parameter (optional): wall-clock flush timeout.
            if (attributeExpressionExecutors.length >= 4) {
                if (attributeExpressionExecutors[3].getReturnType() == Attribute.Type.INT) {
                    schedulerTimeout = Integer.parseInt(String.valueOf(((ConstantExpressionExecutor) attributeExpressionExecutors[3]).getValue()));
                } else if (attributeExpressionExecutors[3].getReturnType() == Attribute.Type.LONG) {
                    schedulerTimeout = Long.parseLong(String.valueOf(((ConstantExpressionExecutor) attributeExpressionExecutors[3]).getValue()));
                } else {
                    throw new ExecutionPlanValidationException("ExternalTimeBatch window's 4th parameter timeout should be either int or long, but found " + attributeExpressionExecutors[3].getReturnType());
                }
            }
            // 5th parameter (optional): replaceTimestampWithBatchEndTime flag.
            if (attributeExpressionExecutors.length == 5) {
                if (attributeExpressionExecutors[4].getReturnType() == Attribute.Type.BOOL) {
                    replaceTimestampWithBatchEndTime = Boolean.parseBoolean(String.valueOf(((ConstantExpressionExecutor) attributeExpressionExecutors[4]).getValue()));
                } else {
                    throw new ExecutionPlanValidationException("ExternalTimeBatch window's 5th parameter replaceTimestampWithBatchEndTime should be bool, but found " + attributeExpressionExecutors[4].getReturnType());
                }
            }
        } else {
            throw new ExecutionPlanValidationException("ExternalTimeBatch window should only have two to five parameters (<long> timestamp, <int|long|time> windowTime, <long> startTime, <int|long|time> timeout, <bool> replaceTimestampWithBatchEndTime), but found " + attributeExpressionExecutors.length + " input attributes");
        }
        // The timeout path needs an expired chunk even when the query does
        // not request expired output.
        if (schedulerTimeout > 0) {
            if (expiredEventChunk == null) {
                this.expiredEventChunk = new ComplexEventChunk<StreamEvent>(false);
            }
        }
    }

    /**
     * Here an assumption is taken:
     * Parameter: timestamp: The time which the window determines as current time and will act upon,
     * the value of this parameter should be monotonically increasing.
     * from https://docs.wso2.com/display/CEP400/Inbuilt+Windows#InbuiltWindows-externalTime
     */
    @Override
    protected void process(ComplexEventChunk<StreamEvent> streamEventChunk, Processor nextProcessor, StreamEventCloner streamEventCloner) {
        // event incoming trigger process. No events means no action
        if (streamEventChunk.getFirst() == null) {
            return;
        }

        // Batches completed within this call; emitted after the lock is
        // released to avoid calling the next processor while synchronized.
        List<ComplexEventChunk<StreamEvent>> complexEventChunks = new ArrayList<ComplexEventChunk<StreamEvent>>();

        synchronized (this) {
            initTiming(streamEventChunk.getFirst());

            StreamEvent nextStreamEvent = streamEventChunk.getFirst();
            while (nextStreamEvent != null) {

                StreamEvent currStreamEvent = nextStreamEvent;
                nextStreamEvent = nextStreamEvent.getNext();

                if (currStreamEvent.getType() == ComplexEvent.Type.TIMER) {
                    if (lastScheduledTime <= currStreamEvent.getTimestamp()) {
                        // implies that there have not been any more events after this schedule has been done.
                        if (!flushed) {
                            flushToOutputChunk(streamEventCloner, complexEventChunks, lastCurrentEventTime, true);
                            flushed = true;
                        } else {
                            if (currentEventChunk.getFirst() != null) {
                                appendToOutputChunk(streamEventCloner, complexEventChunks, lastCurrentEventTime, true);
                            }
                        }

                        // rescheduling to emit the current batch after expiring it if no further events arrive.
                        lastScheduledTime = executionPlanContext.getTimestampGenerator().currentTime() + schedulerTimeout;
                        scheduler.notifyAt(lastScheduledTime);
                    }
                    continue;
                } else if (currStreamEvent.getType() != ComplexEvent.Type.CURRENT) {
                    continue;
                }

                long currentEventTime = (Long) timestampExpressionExecutor.execute(currStreamEvent);
                if (lastCurrentEventTime < currentEventTime) {
                    lastCurrentEventTime = currentEventTime;
                }

                if (currentEventTime < endTime) {
                    // Event belongs to the open batch: just buffer it.
                    cloneAppend(streamEventCloner, currStreamEvent);
                } else {
                    // Event starts a new batch: emit the finished one first.
                    if (flushed) {
                        appendToOutputChunk(streamEventCloner, complexEventChunks, lastCurrentEventTime, false);
                        flushed = false;
                    } else {
                        flushToOutputChunk(streamEventCloner, complexEventChunks, lastCurrentEventTime, false);
                    }
                    // update timestamp, call next processor
                    endTime = findEndTime(lastCurrentEventTime, startTime, timeToKeep);
                    cloneAppend(streamEventCloner, currStreamEvent);
                    // triggering the last batch expiration.
                    if (schedulerTimeout > 0) {
                        lastScheduledTime = executionPlanContext.getTimestampGenerator().currentTime() + schedulerTimeout;
                        scheduler.notifyAt(lastScheduledTime);
                    }
                }
            }
        }
        for (ComplexEventChunk<StreamEvent> complexEventChunk : complexEventChunks) {
            nextProcessor.process(complexEventChunk);
        }
    }

    // Lazily establishes startTime/endTime from the first-ever event, and
    // arms the flush timer when a timeout is configured.
    private void initTiming(StreamEvent firstStreamEvent) {
        // for window beginning, if window is empty, set lastSendTime to incomingChunk first.
        if (endTime < 0) {
            if (isStartTimeEnabled) {
                if (startTimeAsVariable == null) {
                    // Constant startTime: align the first batch end to it.
                    endTime = findEndTime((Long) timestampExpressionExecutor.execute(firstStreamEvent), startTime, timeToKeep);
                } else {
                    // Attribute startTime: read it off the first event.
                    startTime = (Long) startTimeAsVariable.execute(firstStreamEvent);
                    endTime = startTime + timeToKeep;
                }
            } else {
                // Default: first event's timestamp starts the first batch.
                startTime = (Long) timestampExpressionExecutor.execute(firstStreamEvent);
                endTime = startTime + timeToKeep;
            }
            if (schedulerTimeout > 0) {
                lastScheduledTime = executionPlanContext.getTimestampGenerator().currentTime() + schedulerTimeout;
                scheduler.notifyAt(lastScheduledTime);
            }
        }
    }

    /**
     * Closes the current batch: emits the previous batch's expired events, a
     * RESET event, then the batch's current events; moves the batch into the
     * expired chunk when it must be kept for later expiry/find.
     */
    private void flushToOutputChunk(StreamEventCloner streamEventCloner, List<ComplexEventChunk<StreamEvent>> complexEventChunks,
                                    long currentTime, boolean preserveCurrentEvents) {
        ComplexEventChunk<StreamEvent> newEventChunk = new ComplexEventChunk<StreamEvent>(true);
        if (outputExpectsExpiredEvents) {
            if (expiredEventChunk.getFirst() != null) {
                // mark the timestamp for the expiredType event
                expiredEventChunk.reset();
                while (expiredEventChunk.hasNext()) {
                    StreamEvent expiredEvent = expiredEventChunk.next();
                    expiredEvent.setTimestamp(currentTime);
                }
                // add expired event to newEventChunk.
                newEventChunk.add(expiredEventChunk.getFirst());
            }
        }
        if (expiredEventChunk != null) {
            expiredEventChunk.clear();
        }
        if (currentEventChunk.getFirst() != null) {

            // add reset event in front of current events
            resetEvent.setTimestamp(currentTime);
            newEventChunk.add(resetEvent);
            resetEvent = null;

            // move to expired events
            if (preserveCurrentEvents || storeExpiredEvents) {
                currentEventChunk.reset();
                while (currentEventChunk.hasNext()) {
                    StreamEvent currentEvent = currentEventChunk.next();
                    StreamEvent toExpireEvent = streamEventCloner.copyStreamEvent(currentEvent);
                    toExpireEvent.setType(StreamEvent.Type.EXPIRED);
                    expiredEventChunk.add(toExpireEvent);
                }
            }

            // add current event chunk to next processor
            newEventChunk.add(currentEventChunk.getFirst());
        }
        currentEventChunk.clear();

        if (newEventChunk.getFirst() != null) {
            complexEventChunks.add(newEventChunk);
        }
    }

    /**
     * Re-emits an already-flushed batch together with the events that arrived
     * after the flush: previously sent events are replayed as CURRENT (after
     * their EXPIRED copies, when requested), preceded by a RESET.
     */
    private void appendToOutputChunk(StreamEventCloner streamEventCloner, List<ComplexEventChunk<StreamEvent>> complexEventChunks,
                                     long currentTime, boolean preserveCurrentEvents) {
        ComplexEventChunk<StreamEvent> newEventChunk = new ComplexEventChunk<StreamEvent>(true);
        // Holds CURRENT-typed replays of the already-sent (expired) events.
        ComplexEventChunk<StreamEvent> sentEventChunk = new ComplexEventChunk<StreamEvent>(true);
        if (currentEventChunk.getFirst() != null) {
            if (expiredEventChunk.getFirst() != null) {
                // mark the timestamp for the expiredType event
                expiredEventChunk.reset();
                while (expiredEventChunk.hasNext()) {
                    StreamEvent expiredEvent = expiredEventChunk.next();

                    if (outputExpectsExpiredEvents) {
                        // add expired event to newEventChunk.
                        StreamEvent toExpireEvent = streamEventCloner.copyStreamEvent(expiredEvent);
                        toExpireEvent.setTimestamp(currentTime);
                        newEventChunk.add(toExpireEvent);
                    }

                    StreamEvent toSendEvent = streamEventCloner.copyStreamEvent(expiredEvent);
                    toSendEvent.setType(ComplexEvent.Type.CURRENT);
                    sentEventChunk.add(toSendEvent);
                }
            }

            // add reset event in front of current events
            StreamEvent toResetEvent = streamEventCloner.copyStreamEvent(resetEvent);
            toResetEvent.setTimestamp(currentTime);
            newEventChunk.add(toResetEvent);

            //add old events
            newEventChunk.add(sentEventChunk.getFirst());

            // move to expired events
            if (preserveCurrentEvents || storeExpiredEvents) {
                currentEventChunk.reset();
                while (currentEventChunk.hasNext()) {
                    StreamEvent currentEvent = currentEventChunk.next();
                    StreamEvent toExpireEvent = streamEventCloner.copyStreamEvent(currentEvent);
                    toExpireEvent.setType(StreamEvent.Type.EXPIRED);
                    expiredEventChunk.add(toExpireEvent);
                }
            }

            // add current event chunk to next processor
            newEventChunk.add(currentEventChunk.getFirst());
        }
        currentEventChunk.clear();

        if (newEventChunk.getFirst() != null) {
            complexEventChunks.add(newEventChunk);
        }
    }

    // Returns the end of the batch that contains currentTime, aligned to the
    // startTime + k*timeToKeep grid.
    private long findEndTime(long currentTime, long startTime, long timeToKeep) {
        // returns the next emission time based on system clock round time values.
        long elapsedTimeSinceLastEmit = (currentTime - startTime) % timeToKeep;
        return (currentTime + (timeToKeep - elapsedTimeSinceLastEmit));
    }

    // Buffers a copy of the event in the current batch; lazily captures the
    // first event of a batch as the batch's RESET event.
    private void cloneAppend(StreamEventCloner streamEventCloner, StreamEvent currStreamEvent) {
        StreamEvent clonedStreamEvent = streamEventCloner.copyStreamEvent(currStreamEvent);
        if (replaceTimestampWithBatchEndTime) {
            // Overwrite the timestamp attribute with the batch end time.
            clonedStreamEvent.setAttribute(endTime, timestampExpressionExecutor.getPosition());
        }
        currentEventChunk.add(clonedStreamEvent);
        if (resetEvent == null) {
            resetEvent = streamEventCloner.copyStreamEvent(currStreamEvent);
            resetEvent.setType(ComplexEvent.Type.RESET);
        }
    }

    public void start() {
        //Do nothing
    }

    public void stop() {
        //Do nothing
    }

    /**
     * Snapshots the window state for persistence.
     */
    @Override
    public Map<String, Object> currentState() {
        Map<String, Object> state = new HashMap<>();
        state.put("StartTime", startTime);
        state.put("EndTime", endTime);
        state.put("LastScheduledTime", lastScheduledTime);
        state.put("LastCurrentEventTime", lastCurrentEventTime);
        state.put("CurrentEventChunk", currentEventChunk.getFirst());
        state.put("ExpiredEventChunk", expiredEventChunk != null ? expiredEventChunk.getFirst() : null);
        state.put("ResetEvent", resetEvent);
        state.put("Flushed", flushed);
        return state;
    }

    /**
     * Restores a snapshot taken by {@link #currentState()}.
     */
    @Override
    public void restoreState(Map<String, Object> state) {
        startTime = (long) state.get("StartTime");
        endTime = (long) state.get("EndTime");
        lastScheduledTime = (long) state.get("LastScheduledTime");
        lastCurrentEventTime = (long) state.get("LastCurrentEventTime");
        currentEventChunk.clear();
        currentEventChunk.add((StreamEvent) state.get("CurrentEventChunk"));
        if (expiredEventChunk != null) {
            expiredEventChunk.clear();
            expiredEventChunk.add((StreamEvent) state.get("ExpiredEventChunk"));
        } else {
            // NOTE(review): when the chunk was null, a fresh (empty) chunk is
            // created but the snapshotted "ExpiredEventChunk" events are NOT
            // restored into it; confirm this is intentional.
            if (outputExpectsExpiredEvents) {
                expiredEventChunk = new ComplexEventChunk<StreamEvent>(false);
            }
            if (schedulerTimeout > 0) {
                expiredEventChunk = new ComplexEventChunk<StreamEvent>(false);
            }
        }
        resetEvent = (StreamEvent) state.get("ResetEvent");
        flushed = (boolean) state.get("Flushed");
    }

    // Looks up matching events among the retained (expired) batch.
    public synchronized StreamEvent find(StateEvent matchingEvent, Finder finder) {
        return finder.find(matchingEvent, expiredEventChunk, streamEventCloner);
    }

    // Builds the Finder; forces expired-event retention so find() has data.
    @Override
    public Finder constructFinder(Expression expression, MatchingMetaStateHolder matchingMetaStateHolder, ExecutionPlanContext executionPlanContext,
                                  List<VariableExpressionExecutor> variableExpressionExecutors, Map<String, EventTable> eventTableMap) {
        if (expiredEventChunk == null) {
            expiredEventChunk = new ComplexEventChunk<StreamEvent>(false);
            storeExpiredEvents = true;
        }
        return OperatorParser.constructOperator(expiredEventChunk, expression, matchingMetaStateHolder, executionPlanContext, variableExpressionExecutors, eventTableMap, queryName);
    }

    @Override
    public Scheduler getScheduler() {
        return this.scheduler;
    }

    @Override
    public void setScheduler(Scheduler scheduler) {
        this.scheduler = scheduler;
    }
}
/*
 * ModeShape (http://www.modeshape.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.modeshape.jboss.subsystem;

import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ADD;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.SUBSYSTEM;
import static org.jboss.as.controller.parsing.ParseUtils.requireNoAttributes;
import java.util.ArrayList;
import java.util.List;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import org.jboss.as.controller.descriptions.ModelDescriptionConstants;
import org.jboss.as.controller.operations.common.Util;
import org.jboss.as.controller.parsing.ParseUtils;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.ValueExpression;
import org.jboss.staxmapper.XMLElementReader;
import org.jboss.staxmapper.XMLExtendedStreamReader;

/**
 * StAX reader for the 3.0 version of the ModeShape subsystem XML schema:
 * translates the subsystem configuration into boot-time ADD operations.
 */
public class ModeShapeSubsystemXMLReader_3_0 implements XMLStreamConstants, XMLElementReader<List<ModelNode>> {

    @Override
    public void readElement( final XMLExtendedStreamReader reader,
                             final List<ModelNode> list ) throws XMLStreamException {

        final ModelNode subsystem = new ModelNode();
        subsystem.add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME);
        subsystem.protect();

        // ADD operation for the subsystem itself, queued before any child ops.
        final ModelNode bootServices = new ModelNode();
        bootServices.get(OP).set(ADD);
        bootServices.get(OP_ADDR).set(subsystem);
        list.add(bootServices);

        // no attributes
        requireNoAttributes(reader);

        final List<ModelNode> repositories = new ArrayList<ModelNode>();
        final List<ModelNode> webapps = new ArrayList<ModelNode>();
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            if (reader.isStartElement()) {
                // elements
                switch (Namespace.forUri(reader.getNamespaceURI())) {
                    case MODESHAPE_3_0:
                        Element element = Element.forName(reader.getLocalName());
                        switch (element) {
                            case REPOSITORY:
                                parseRepository(reader, subsystem, repositories);
                                break;
                            case WEBAPP: {
                                parseWebApp(reader, subsystem, webapps);
                                break;
                            }
                            default:
                                throw ParseUtils.unexpectedElement(reader);
                        }
                        break;
                    case UNKNOWN:
                        throw ParseUtils.unexpectedElement(reader);
                }
            }
        }

        // Webapp ops are queued before repository ops.
        list.addAll(webapps);
        list.addAll(repositories);
    }

    /**
     * Parses a &lt;webapp&gt; element into an ADD operation appended to
     * {@code webapps}. Only attributes are allowed on the element.
     */
    private void parseWebApp( final XMLExtendedStreamReader reader,
                              final ModelNode address,
                              final List<ModelNode> webapps ) throws XMLStreamException {
        final ModelNode webappAddress = address.clone();
        final ModelNode webapp = Util.getEmptyOperation(ModelDescriptionConstants.ADD, webappAddress);

        String webappName = null;
        for (int i = 0; i < reader.getAttributeCount(); i++) {
            String attrName = reader.getAttributeLocalName(i);
            String attrValue = reader.getAttributeValue(i);
            Attribute attribute = Attribute.forName(attrName);

            switch (attribute) {
                case NAME: {
                    // The name completes the op address; the op is registered
                    // as soon as the name attribute is seen.
                    webappName = attrValue;
                    webappAddress.add(ModelKeys.WEBAPP, webappName);
                    webappAddress.protect();
                    webapp.get(OP).set(ADD);
                    webapp.get(OP_ADDR).set(webappAddress);
                    webapps.add(webapp);
                    break;
                }
                case EXPLODED: {
                    ModelAttributes.EXPLODED.parseAndSetParameter(attrValue, webapp, reader);
                    break;
                }
                default:
                    throw ParseUtils.unexpectedAttribute(reader, i);
            }
        }
        requireNoElements(reader);
    }

    /**
     * Parses a &lt;repository&gt; element (attributes here; child elements
     * continue below) into ADD operations appended to {@code repositories}.
     */
    private void parseRepository( final XMLExtendedStreamReader reader,
                                  final ModelNode address,
                                  final List<ModelNode> repositories ) throws XMLStreamException {
        final ModelNode repositoryAddress = address.clone();
        final ModelNode repository = 
Util.getEmptyOperation(ModelDescriptionConstants.ADD, repositoryAddress);
        String repositoryName = null;
        if (reader.getAttributeCount() > 0) {
            // Repository attributes: the NAME attribute completes the op
            // address and registers the op; all others map 1:1 onto the
            // corresponding ModelAttributes parameter.
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);

                switch (attribute) {
                    case NAME:
                        repositoryName = attrValue;
                        repositoryAddress.add(ModelKeys.REPOSITORY, attrValue);
                        repositoryAddress.protect();
                        repository.get(OP).set(ADD);
                        repository.get(OP_ADDR).set(repositoryAddress);
                        repositories.add(repository);
                        break;
                    case JNDI_NAME:
                        ModelAttributes.JNDI_NAME.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case ENABLE_MONITORING:
                        ModelAttributes.ENABLE_MONITORING.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case CLUSTER_STACK:
                        ModelAttributes.CLUSTER_STACK.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case CLUSTER_NAME:
                        ModelAttributes.CLUSTER_NAME.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case CLUSTER_CONFIG:
                        ModelAttributes.CLUSTER_CONFIG.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case CLUSTER_LOCKING:
                        ModelAttributes.CLUSTER_LOCKING.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case SECURITY_DOMAIN:
                        ModelAttributes.SECURITY_DOMAIN.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case ANONYMOUS_ROLES:
                        // List-valued attribute: each role is appended
                        // individually rather than set as a single string.
                        for (String role : reader.getListAttributeValue(i)) {
                            repository.get(ModelKeys.ANONYMOUS_ROLES).add(role);
                        }
                        break;
                    case ANONYMOUS_USERNAME:
                        ModelAttributes.ANONYMOUS_USERNAME.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case USE_ANONYMOUS_IF_AUTH_FAILED:
                        ModelAttributes.USE_ANONYMOUS_IF_AUTH_FAILED.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case GARBAGE_COLLECTION_THREAD_POOL:
                        ModelAttributes.GARBAGE_COLLECTION_THREAD_POOL.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case GARBAGE_COLLECTION_INITIAL_TIME:
                        ModelAttributes.GARBAGE_COLLECTION_INITIAL_TIME.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case GARBAGE_COLLECTION_INTERVAL:
                        ModelAttributes.GARBAGE_COLLECTION_INTERVAL.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case DOCUMENT_OPTIMIZATION_THREAD_POOL:
                        ModelAttributes.DOCUMENT_OPTIMIZATION_THREAD_POOL.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case DOCUMENT_OPTIMIZATION_INITIAL_TIME:
                        ModelAttributes.DOCUMENT_OPTIMIZATION_INITIAL_TIME.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case DOCUMENT_OPTIMIZATION_INTERVAL:
                        ModelAttributes.DOCUMENT_OPTIMIZATION_INTERVAL.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case DOCUMENT_OPTIMIZATION_CHILD_COUNT_TARGET:
                        ModelAttributes.DOCUMENT_OPTIMIZATION_CHILD_COUNT_TARGET.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case DOCUMENT_OPTIMIZATION_CHILD_COUNT_TOLERANCE:
                        ModelAttributes.DOCUMENT_OPTIMIZATION_CHILD_COUNT_TOLERANCE.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case EVENT_BUS_SIZE:
                        ModelAttributes.EVENT_BUS_SIZE.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case LOCK_TIMEOUT_MILLIS:
                        ModelAttributes.LOCK_TIMEOUT_MILLIS.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case REPOSITORY_MODULE_DEPENDENCIES:
                        ModelAttributes.REPOSITORY_MODULE_DEPENDENCIES.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    default:
                        throw ParseUtils.unexpectedAttribute(reader, i);
                }
            }
        }

        // Child elements: persistence, workspaces, journaling, node types,
        // binary storage variants, authenticators, sequencers, etc.
        ModelNode persistence = null;
        ModelNode binaryStorage = null;
        List<ModelNode> sequencers = new ArrayList<ModelNode>();
        List<ModelNode> indexProviders = new ArrayList<ModelNode>();
        List<ModelNode> indexes = new ArrayList<ModelNode>();
        List<ModelNode> externalSources = new ArrayList<ModelNode>();
        List<ModelNode> textExtractors = new ArrayList<ModelNode>();
        List<ModelNode> authenticators = new ArrayList<ModelNode>();
        List<ModelNode> multipleStorageNodes = new ArrayList<ModelNode>();
        while (reader.hasNext() && (reader.nextTag() != XMLStreamConstants.END_ELEMENT)) {
            Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case DB_PERSISTENCE: {
                    persistence = parseDBPersistence(reader, repositoryName);
                    break;
                }
                case FILE_PERSISTENCE: {
                    persistence = parseFilePersistence(reader, repositoryName);
                    break;
                }
                case WORKSPACES:
                    parseWorkspaces(reader, address, repository);
                    break;
                case JOURNALING: {
                    parseJournaling(reader, repository);
                    break;
                }
                case NODE_TYPES:
                    parseNodeTypes(reader, repository);
                    break;
                // Binary storage ...
                case TRANSIENT_BINARY_STORAGE:
                    addBinaryStorageConfiguration(repositories, repositoryName);
                    binaryStorage = parseTransientBinaryStorage(reader, repositoryName);
                    break;
                case FILE_BINARY_STORAGE:
                    addBinaryStorageConfiguration(repositories, repositoryName);
                    binaryStorage = parseFileBinaryStorage(reader, repositoryName, false);
                    break;
                case DB_BINARY_STORAGE:
                    addBinaryStorageConfiguration(repositories, repositoryName);
                    binaryStorage = parseDatabaseBinaryStorage(reader, repositoryName, false);
                    break;
                case CASSANDRA_BINARY_STORAGE:
                    addBinaryStorageConfiguration(repositories, repositoryName);
                    binaryStorage = parseCassandraBinaryStorage(reader, repositoryName, false);
                    break;
                case MONGO_BINARY_STORAGE:
                    addBinaryStorageConfiguration(repositories, repositoryName);
                    binaryStorage = parseMongoBinaryStorage(reader, repositoryName, false);
                    break;
                case S3_BINARY_STORAGE:
                    addBinaryStorageConfiguration(repositories, repositoryName);
                    binaryStorage = parseS3BinaryStorage(reader, repositoryName, false);
                    break;
                case COMPOSITE_BINARY_STORAGE:
                    addBinaryStorageConfiguration(repositories, repositoryName);
                    multipleStorageNodes = parseCompositeBinaryStorage(reader, repositoryName);
                    break;
                case CUSTOM_BINARY_STORAGE:
                    addBinaryStorageConfiguration(repositories, repositoryName);
                    binaryStorage = parseCustomBinaryStorage(reader, repositoryName, false);
                    break;
                // Authenticators ...
                case AUTHENTICATORS:
                    authenticators = parseAuthenticators(reader, repositoryName);
                    break;
                // Sequencing ...
case SEQUENCERS: sequencers = parseSequencers(reader, repository, address, repositoryName); break; // Index providers ... case INDEX_PROVIDERS: indexProviders = parseIndexProviders(reader, address, repositoryName); break; // Indexes ... case INDEXES: indexes = parseIndexes(reader, address, repositoryName); break; // Reindexing... case REINDEXIG: { parseReindexing(reader, repository); break; } // External sources ... case EXTERNAL_SOURCES: externalSources = parseExternalSources(reader, address, repositoryName); break; // Text extracting ... case TEXT_EXTRACTORS: textExtractors = parseTextExtracting(reader, repository, repositoryName); break; default: throw ParseUtils.unexpectedElement(reader); } } if (binaryStorage != null) repositories.add(binaryStorage); if (persistence != null) repositories.add(persistence); repositories.addAll(multipleStorageNodes); repositories.addAll(sequencers); repositories.addAll(indexProviders); repositories.addAll(indexes); repositories.addAll(externalSources); repositories.addAll(textExtractors); repositories.addAll(authenticators); } private ModelNode parseDBPersistence(XMLExtendedStreamReader reader, String repositoryName) throws XMLStreamException { final ModelNode persistence = new ModelNode(); persistence.get(OP).set(ADD); if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { case TABLE_NAME: ModelAttributes.TABLE_NAME.parseAndSetParameter(attrValue, persistence, reader); break; case CREATE_ON_START: ModelAttributes.CREATE_ON_START.parseAndSetParameter(attrValue, persistence, reader); break; case DROP_ON_EXIT: ModelAttributes.DROP_ON_EXIT.parseAndSetParameter(attrValue, persistence, reader); break; case URL: ModelAttributes.CONNECTION_URL.parseAndSetParameter(attrValue, persistence, reader); break; case USERNAME: 
ModelAttributes.USERNAME.parseAndSetParameter(attrValue, persistence, reader); break; case PASSWORD: ModelAttributes.PASSWORD.parseAndSetParameter(attrValue, persistence, reader); break; case DRIVER: ModelAttributes.DRIVER.parseAndSetParameter(attrValue, persistence, reader); break; case FETCH_SIZE: ModelAttributes.FETCH_SIZE.parseAndSetParameter(attrValue, persistence, reader); break; case COMPRESS: ModelAttributes.DB_COMPRESS.parseAndSetParameter(attrValue, persistence, reader); break; case DATA_SOURCE_JNDI_NAME: ModelAttributes.PERSISTENCE_DS_JNDI_NAME.parseAndSetParameter(attrValue, persistence, reader); break; case POOL_SIZE: ModelAttributes.POOL_SIZE.parseAndSetParameter(attrValue, persistence, reader); break; default: // extra attributes are allowed persistence.get(ModelKeys.PROPERTIES).add(attrName, attrValue); break; } } } String dbPersistenceKey = Attribute.DB_PERSISTENCE.getLocalName(); persistence.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(dbPersistenceKey, dbPersistenceKey); requireNoElements(reader); return persistence; } private ModelNode parseFilePersistence(XMLExtendedStreamReader reader, String repositoryName) throws XMLStreamException { final ModelNode persistence = new ModelNode(); persistence.get(OP).set(ADD); if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { case PATH: ModelAttributes.FS_PATH.parseAndSetParameter(attrValue, persistence, reader); break; case COMPRESS: ModelAttributes.FS_COMPRESS.parseAndSetParameter(attrValue, persistence, reader); break; default: throw ParseUtils.unexpectedAttribute(reader, i); } } } String fsPersistenceKey = Attribute.FS_PERSISTENCE.getLocalName(); persistence.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) 
            .add(ModelKeys.REPOSITORY, repositoryName)
            .add(fsPersistenceKey, fsPersistenceKey); // the key doubles as the address value: one fs-persistence node per repository
        requireNoElements(reader); // file persistence is attribute-only; child elements are invalid
        return persistence;
    }

    /**
     * Parses the node-types element. Each nested node-type element contributes its
     * text content to the repository's NODE_TYPES list attribute; any other child
     * element is rejected.
     *
     * @param reader the XML stream positioned on the node-types element
     * @param repository the repository ADD operation being populated
     * @throws XMLStreamException on malformed XML or an unexpected child element
     */
    private void parseNodeTypes( XMLExtendedStreamReader reader,
                                 ModelNode repository ) throws XMLStreamException {
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            final Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case NODE_TYPE: {
                    repository.get(ModelKeys.NODE_TYPES).add(reader.getElementText());
                    break;
                }
                default: {
                    throw ParseUtils.unexpectedElement(reader);
                }
            }
        }
    }

    /**
     * Adds the intermediate binary-storage "configuration" ADD operation for the named
     * repository. Callers invoke this before parsing a concrete binary-storage element
     * so the configuration=binary-storage address exists for the nested storage node.
     *
     * @param repositories the list of operations being accumulated
     * @param repositoryName the name of the owning repository
     */
    private void addBinaryStorageConfiguration( final List<ModelNode> repositories,
                                                String repositoryName ) {
        ModelNode configuration = new ModelNode();
        configuration.get(OP).set(ADD);
        configuration.get(OP_ADDR)
                     .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME)
                     .add(ModelKeys.REPOSITORY, repositoryName)
                     .add(ModelKeys.CONFIGURATION, ModelKeys.BINARY_STORAGE);
        repositories.add(configuration);
    }

    /**
     * Parses the workspaces element. Workspace-related attributes are stored directly
     * on the repository ADD operation; nested workspace and initial-content child
     * elements are handled in the element loop that follows the attribute loop.
     *
     * @param reader the XML stream positioned on the workspaces element
     * @param parentAddress the repository address (not referenced in this method's
     *        visible body -- TODO confirm whether the parameter can be dropped)
     * @param repository the repository ADD operation being populated
     * @throws XMLStreamException on malformed XML or an unexpected attribute/element
     */
    private void parseWorkspaces( final XMLExtendedStreamReader reader,
                                  final ModelNode parentAddress,
                                  final ModelNode repository ) throws XMLStreamException {
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);
                switch (attribute) {
                    // Set these as properties on the repository ModelNode ...
                    case ALLOW_WORKSPACE_CREATION:
                        ModelAttributes.ALLOW_WORKSPACE_CREATION.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case DEFAULT_WORKSPACE:
                        ModelAttributes.DEFAULT_WORKSPACE.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case CACHE_SIZE: {
                        ModelAttributes.WORKSPACES_CACHE_SIZE.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    }
                    default:
                        throw ParseUtils.unexpectedAttribute(reader, i);
                }
            }
        }
        // Child elements: predefined workspaces and the repository-wide default initial content.
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            final Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case WORKSPACE: {
                    parseWorkspace(reader, repository);
                    break;
                }
                case INITIAL_CONTENT: {
                    repository.get(ModelKeys.DEFAULT_INITIAL_CONTENT).set(reader.getElementText());
                    break;
                }
                default: {
                    throw ParseUtils.unexpectedElement(reader);
                }
            }
        }
    }

    /**
     * Parses the reindexing element; each recognized attribute is copied onto the
     * repository ADD operation.
     *
     * @param reader the XML stream positioned on the reindexing element
     * @param repository the repository ADD operation being populated
     * @throws XMLStreamException on malformed XML or an unexpected attribute/element
     */
    private void parseReindexing( final XMLExtendedStreamReader reader,
                                  final ModelNode repository ) throws XMLStreamException {
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);
                switch (attribute) {
                    // Set these as properties on the repository ModelNode ...
                    // NOTE(review): enum constant name looks misspelled ("ASNC"); it is
                    // declared elsewhere, so it is referenced as-is here.
                    case REINDEXING_ASNC:
                        ModelAttributes.REINDEXING_ASYNC.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case REINDEXING_MODE:
                        ModelAttributes.REINDEXING_MODE.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    default:
                        throw ParseUtils.unexpectedAttribute(reader, i);
                }
            }
        }
        requireNoElements(reader); // reindexing is attribute-only
    }

    /**
     * Parses the journaling element. The mere presence of the element switches
     * journaling on; each recognized attribute is then copied onto the repository
     * ADD operation.
     *
     * @param reader the XML stream positioned on the journaling element
     * @param repository the repository ADD operation being populated
     * @throws XMLStreamException on malformed XML or an unexpected attribute/element
     */
    private void parseJournaling( final XMLExtendedStreamReader reader,
                                  final ModelNode repository ) throws XMLStreamException {
        // Presence of the element enables journaling even when no attributes are given.
        repository.get(ModelAttributes.JOURNALING.getName()).set(true);
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);
                switch (attribute) {
                    // Set these as properties on the repository ModelNode ...
                    case JOURNAL_ENABLED:
                        ModelAttributes.JOURNAL_ENABLED.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case JOURNAL_PATH:
                        ModelAttributes.JOURNAL_PATH.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case JOURNAL_RELATIVE_TO:
                        ModelAttributes.JOURNAL_RELATIVE_TO.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case MAX_DAYS_TO_KEEP_RECORDS:
                        ModelAttributes.MAX_DAYS_TO_KEEP_RECORDS.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case ASYNC_WRITES:
                        ModelAttributes.ASYNC_WRITES.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case JOURNAL_GC_THREAD_POOL: {
                        ModelAttributes.JOURNAL_GC_THREAD_POOL.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    }
                    case JOURNAL_GC_INITIAL_TIME: {
                        ModelAttributes.JOURNAL_GC_INITIAL_TIME.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    }
                    default:
                        throw ParseUtils.unexpectedAttribute(reader, i);
                }
            }
        }
        requireNoElements(reader); // journaling is attribute-only
    }

    /**
     * Parses a single workspace element: the name attribute registers a predefined
     * workspace name on the repository; nested initial-content elements are handled
     * by the element loop that follows.
     */
    private void parseWorkspace( final XMLExtendedStreamReader reader,
                                 final ModelNode repository ) throws XMLStreamException {
        String workspaceName = null;
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i <
reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { case NAME: workspaceName = attrValue; repository.get(ModelKeys.PREDEFINED_WORKSPACE_NAMES).add(attrValue); break; default: throw ParseUtils.unexpectedAttribute(reader, i); } } } while (reader.hasNext() && reader.nextTag() != END_ELEMENT) { final Element element = Element.forName(reader.getLocalName()); switch (element) { case INITIAL_CONTENT: { if (workspaceName != null) { repository.get(ModelKeys.WORKSPACES_INITIAL_CONTENT).add(workspaceName, reader.getElementText()); } break; } default: { throw ParseUtils.unexpectedElement(reader); } } } } private ModelNode parseFileBinaryStorage( final XMLExtendedStreamReader reader, final String repositoryName, boolean nested ) throws XMLStreamException { final ModelNode storageType = new ModelNode(); storageType.get(OP).set(ADD); storageType.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(ModelKeys.CONFIGURATION, ModelKeys.BINARY_STORAGE); String storeName = null; if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { // The rest go on the ModelNode for the type ... 
case RELATIVE_TO: ModelAttributes.RELATIVE_TO.parseAndSetParameter(attrValue, storageType, reader); break; case PATH: ModelAttributes.PATH.parseAndSetParameter(attrValue, storageType, reader); break; case TRASH: ModelAttributes.TRASH.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_VALUE_SIZE: ModelAttributes.MINIMUM_BINARY_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_STRING_SIZE: ModelAttributes.MINIMUM_STRING_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIME_TYPE_DETECTION: ModelAttributes.MIME_TYPE_DETECTION.parseAndSetParameter(attrValue, storageType, reader); break; case STORE_NAME: if (nested) { // part of a composite binary store storeName = attrValue.trim(); ModelAttributes.STORE_NAME.parseAndSetParameter(attrValue, storageType, reader); break; } default: throw ParseUtils.unexpectedAttribute(reader, i); } } } requireNoElements(reader); if (nested) { storageType.get(OP_ADDR) .add(ModelKeys.STORAGE_TYPE, ModelKeys.COMPOSITE_BINARY_STORAGE) .add(ModelKeys.NESTED_STORAGE_TYPE_FILE, storeName); } else { storageType.get(OP_ADDR).add(ModelKeys.STORAGE_TYPE, ModelKeys.FILE_BINARY_STORAGE); } return storageType; } private ModelNode parseTransientBinaryStorage( final XMLExtendedStreamReader reader, final String repositoryName) throws XMLStreamException { final ModelNode storageType = new ModelNode(); storageType.get(OP).set(ADD); storageType.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(ModelKeys.CONFIGURATION, ModelKeys.BINARY_STORAGE); if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { // The rest go on the ModelNode for the type ... 
                    case MIN_VALUE_SIZE:
                        ModelAttributes.MINIMUM_BINARY_SIZE.parseAndSetParameter(attrValue, storageType, reader);
                        break;
                    case MIN_STRING_SIZE:
                        ModelAttributes.MINIMUM_STRING_SIZE.parseAndSetParameter(attrValue, storageType, reader);
                        break;
                    case MIME_TYPE_DETECTION:
                        ModelAttributes.MIME_TYPE_DETECTION.parseAndSetParameter(attrValue, storageType, reader);
                        break;
                    default:
                        throw ParseUtils.unexpectedAttribute(reader, i);
                }
            }
        }
        requireNoElements(reader); // transient binary storage is attribute-only
        // Complete the address only after the element parsed cleanly.
        storageType.get(OP_ADDR).add(ModelKeys.STORAGE_TYPE, ModelKeys.TRANSIENT_BINARY_STORAGE);
        return storageType;
    }

    /**
     * Parses a db-binary-storage element into an ADD operation under the
     * binary-storage configuration of the named repository.
     *
     * @param reader the XML stream positioned on the db-binary-storage element
     * @param repositoryName the name of the owning repository
     * @param nested true when this store is declared inside a composite binary store,
     *        in which case a store-name attribute is accepted and becomes part of the
     *        operation address
     * @return the ADD operation for the database binary store
     * @throws XMLStreamException on malformed XML or an unexpected attribute/element
     */
    private ModelNode parseDatabaseBinaryStorage( final XMLExtendedStreamReader reader,
                                                  final String repositoryName,
                                                  boolean nested ) throws XMLStreamException {
        final ModelNode storageType = new ModelNode();
        storageType.get(OP).set(ADD);
        storageType.get(OP_ADDR)
                   .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME)
                   .add(ModelKeys.REPOSITORY, repositoryName)
                   .add(ModelKeys.CONFIGURATION, ModelKeys.BINARY_STORAGE);
        String storeName = null;
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);
                switch (attribute) {
                    // The rest go on the ModelNode for the type ...
case DATA_SOURCE_JNDI_NAME: ModelAttributes.DATA_SOURCE_JNDI_NAME.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_VALUE_SIZE: ModelAttributes.MINIMUM_BINARY_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_STRING_SIZE: ModelAttributes.MINIMUM_STRING_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIME_TYPE_DETECTION: ModelAttributes.MIME_TYPE_DETECTION.parseAndSetParameter(attrValue, storageType, reader); break; case STORE_NAME: if (nested) { // part of a composite binary store storeName = attrValue.trim(); ModelAttributes.STORE_NAME.parseAndSetParameter(attrValue, storageType, reader); break; } default: throw ParseUtils.unexpectedAttribute(reader, i); } } } requireNoElements(reader); if (nested) { storageType.get(OP_ADDR) .add(ModelKeys.STORAGE_TYPE, ModelKeys.COMPOSITE_BINARY_STORAGE) .add(ModelKeys.NESTED_STORAGE_TYPE_DB, storeName); } else { storageType.get(OP_ADDR).add(ModelKeys.STORAGE_TYPE, ModelKeys.DB_BINARY_STORAGE); } return storageType; } private ModelNode parseCassandraBinaryStorage(final XMLExtendedStreamReader reader, final String repositoryName, boolean nested) throws XMLStreamException { final ModelNode storageType = new ModelNode(); storageType.get(OP).set(ADD); storageType.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(ModelKeys.CONFIGURATION, ModelKeys.BINARY_STORAGE); if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { // The rest go on the ModelNode for the type ... 
case HOST: ModelAttributes.CASSANDRA_HOST.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_VALUE_SIZE: ModelAttributes.MINIMUM_BINARY_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_STRING_SIZE: ModelAttributes.MINIMUM_STRING_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIME_TYPE_DETECTION: ModelAttributes.MIME_TYPE_DETECTION.parseAndSetParameter(attrValue, storageType, reader); break; default: throw ParseUtils.unexpectedAttribute(reader, i); } } } requireNoElements(reader); storageType.get(OP_ADDR).add(ModelKeys.STORAGE_TYPE, ModelKeys.CASSANDRA_BINARY_STORAGE); return storageType; } private ModelNode parseMongoBinaryStorage(final XMLExtendedStreamReader reader, final String repositoryName, boolean nested) throws XMLStreamException { final ModelNode storageType = new ModelNode(); storageType.get(OP).set(ADD); storageType.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(ModelKeys.CONFIGURATION, ModelKeys.BINARY_STORAGE); if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { // The rest go on the ModelNode for the type ... 
case HOST: ModelAttributes.MONGO_HOST.parseAndSetParameter(attrValue, storageType, reader); break; case PORT: ModelAttributes.MONGO_PORT.parseAndSetParameter(attrValue, storageType, reader); break; case DATABASE: ModelAttributes.MONGO_DATABASE.parseAndSetParameter(attrValue, storageType, reader); break; case USERNAME: ModelAttributes.MONGO_USERNAME.parseAndSetParameter(attrValue, storageType, reader); break; case PASSWORD: ModelAttributes.MONGO_PASSWORD.parseAndSetParameter(attrValue, storageType, reader); break; case HOST_ADDRESSES: ModelAttributes.MONGO_HOST_ADDRESSES.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_VALUE_SIZE: ModelAttributes.MINIMUM_BINARY_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_STRING_SIZE: ModelAttributes.MINIMUM_STRING_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIME_TYPE_DETECTION: ModelAttributes.MIME_TYPE_DETECTION.parseAndSetParameter(attrValue, storageType, reader); break; default: throw ParseUtils.unexpectedAttribute(reader, i); } } } requireNoElements(reader); storageType.get(OP_ADDR).add(ModelKeys.STORAGE_TYPE, ModelKeys.MONGO_BINARY_STORAGE); return storageType; } private ModelNode parseS3BinaryStorage(final XMLExtendedStreamReader reader, final String repositoryName, boolean nested) throws XMLStreamException { final ModelNode storageType = new ModelNode(); storageType.get(OP).set(ADD); storageType.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(ModelKeys.CONFIGURATION, ModelKeys.BINARY_STORAGE); if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { case BUCKET_NAME: ModelAttributes.S3_BUCKET_NAME.parseAndSetParameter(attrValue, storageType, reader); break; case USERNAME: 
ModelAttributes.S3_USERNAME.parseAndSetParameter(attrValue, storageType, reader); break; case PASSWORD: ModelAttributes.S3_PASSWORD.parseAndSetParameter(attrValue, storageType, reader); break; case ENDPOINT_URL: ModelAttributes.S3_ENDPOINT_URL.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_VALUE_SIZE: ModelAttributes.MINIMUM_BINARY_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_STRING_SIZE: ModelAttributes.MINIMUM_STRING_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIME_TYPE_DETECTION: ModelAttributes.MIME_TYPE_DETECTION.parseAndSetParameter(attrValue, storageType, reader); break; default: throw ParseUtils.unexpectedAttribute(reader, i); } } } requireNoElements(reader); storageType.get(OP_ADDR).add(ModelKeys.STORAGE_TYPE, ModelKeys.S3_BINARY_STORAGE); return storageType; } private ModelNode parseCustomBinaryStorage( final XMLExtendedStreamReader reader, final String repositoryName, boolean nested ) throws XMLStreamException { final ModelNode storageType = new ModelNode(); storageType.get(OP).set(ADD); storageType.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(ModelKeys.CONFIGURATION, ModelKeys.BINARY_STORAGE); String storeName = null; if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { case CLASSNAME: ModelAttributes.CLASSNAME.parseAndSetParameter(attrValue, storageType, reader); break; case MODULE: ModelAttributes.MODULE.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_VALUE_SIZE: ModelAttributes.MINIMUM_BINARY_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; case MIN_STRING_SIZE: ModelAttributes.MINIMUM_STRING_SIZE.parseAndSetParameter(attrValue, storageType, reader); break; 
case MIME_TYPE_DETECTION: ModelAttributes.MIME_TYPE_DETECTION.parseAndSetParameter(attrValue, storageType, reader); break; case STORE_NAME: if (nested) { // part of a composite binary store storeName = attrValue.trim(); ModelAttributes.STORE_NAME.parseAndSetParameter(attrValue, storageType, reader); break; } default: storageType.get(attrName).set(attrValue); break; } } } requireNoElements(reader); if (nested) { storageType.get(OP_ADDR) .add(ModelKeys.STORAGE_TYPE, ModelKeys.COMPOSITE_BINARY_STORAGE) .add(ModelKeys.NESTED_STORAGE_TYPE_CUSTOM, storeName); } else { storageType.get(OP_ADDR).add(ModelKeys.STORAGE_TYPE, ModelKeys.CUSTOM_BINARY_STORAGE); } return storageType; } private List<ModelNode> parseCompositeBinaryStorage( final XMLExtendedStreamReader reader, final String repositoryName ) throws XMLStreamException { final List<ModelNode> stores = new ArrayList<ModelNode>(); final ModelNode compositeBinaryStorage = new ModelNode(); compositeBinaryStorage.get(OP).set(ADD); compositeBinaryStorage.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(ModelKeys.CONFIGURATION, ModelKeys.BINARY_STORAGE) .add(ModelKeys.STORAGE_TYPE, ModelKeys.COMPOSITE_BINARY_STORAGE); if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { case MIN_VALUE_SIZE: ModelAttributes.MINIMUM_BINARY_SIZE.parseAndSetParameter(attrValue, compositeBinaryStorage, reader); break; case MIN_STRING_SIZE: ModelAttributes.MINIMUM_STRING_SIZE.parseAndSetParameter(attrValue, compositeBinaryStorage, reader); break; case MIME_TYPE_DETECTION: ModelAttributes.MIME_TYPE_DETECTION.parseAndSetParameter(attrValue, compositeBinaryStorage, reader); break; case STORE_NAME: ModelAttributes.STORE_NAME.parseAndSetParameter(attrValue, compositeBinaryStorage, 
                                                                   reader);
                        break;
                    default:
                        throw ParseUtils.unexpectedAttribute(reader, i);
                }
            }
        }
        stores.add(compositeBinaryStorage);
        List<String> storeNames = new ArrayList<String>();
        // Each child element is a nested store parsed with nested=true so it carries a store-name.
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            final ModelNode nestedBinaryStore;
            final Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case FILE_BINARY_STORAGE:
                    nestedBinaryStore = parseFileBinaryStorage(reader, repositoryName, true);
                    break;
                case DB_BINARY_STORAGE:
                    nestedBinaryStore = parseDatabaseBinaryStorage(reader, repositoryName, true);
                    break;
                case CUSTOM_BINARY_STORAGE:
                    nestedBinaryStore = parseCustomBinaryStorage(reader, repositoryName, true);
                    break;
                default:
                    throw ParseUtils.unexpectedElement(reader);
            }
            // validate store-name uniqueness within a composite store
            String storeName = nestedBinaryStore.get(ModelKeys.STORE_NAME).asString();
            if (storeNames.contains(storeName)) {
                throw ParseUtils.duplicateAttribute(reader, ModelKeys.STORE_NAME + "=" + storeName);
            }
            storeNames.add(storeName);
            stores.add(nestedBinaryStore);
            // Record the nested store's name on the composite so the parent lists its members.
            ModelAttributes.NESTED_STORES.parseAndAddParameterElement(storeName, compositeBinaryStorage, reader);
        }
        return stores;
    }

    /**
     * Parses the authenticators container element: no attributes are allowed, and each
     * nested authenticator element yields one ADD operation.
     *
     * @param reader the XML stream positioned on the authenticators element
     * @param repositoryName the name of the owning repository
     * @return the list of authenticator ADD operations (possibly empty)
     * @throws XMLStreamException on malformed XML or an unexpected attribute/element
     */
    private List<ModelNode> parseAuthenticators( final XMLExtendedStreamReader reader,
                                                 final String repositoryName ) throws XMLStreamException {
        requireNoAttributes(reader);
        List<ModelNode> authenticators = new ArrayList<ModelNode>();
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            final Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case AUTHENTICATOR:
                    parseAuthenticator(reader, repositoryName, authenticators);
                    break;
                default:
                    throw ParseUtils.unexpectedElement(reader);
            }
        }
        return authenticators;
    }

    /**
     * Parses a single authenticator element. The authenticator is named by its name
     * attribute, falling back to the classname when no explicit name is given; unknown
     * attributes are kept as generic properties. The operation is added to the list
     * before its attributes are read.
     */
    private void parseAuthenticator( final XMLExtendedStreamReader reader,
                                     String repositoryName,
                                     final List<ModelNode> authenticators ) throws XMLStreamException {
        final ModelNode authenticator = new ModelNode();
        authenticator.get(OP).set(ADD);
        String name = null;
authenticators.add(authenticator); if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { case NAME: name = attrValue; break; case CLASSNAME: ModelAttributes.AUTHENTICATOR_CLASSNAME.parseAndSetParameter(attrValue, authenticator, reader); if (name == null) name = attrValue; break; case MODULE: ModelAttributes.MODULE.parseAndSetParameter(attrValue, authenticator, reader); break; default: // extra attributes are allowed to set extractor-specific properties ... authenticator.get(ModelKeys.PROPERTIES).add(attrName, attrValue); break; } } } authenticator.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(ModelKeys.AUTHENTICATOR, name); requireNoElements(reader); } private List<ModelNode> parseSequencers( final XMLExtendedStreamReader reader, final ModelNode repository, final ModelNode parentAddress, final String repositoryName ) throws XMLStreamException { if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { case THREAD_POOL_NAME: ModelAttributes.SEQUENCER_THREAD_POOL_NAME.parseAndSetParameter(attrValue, repository, reader); break; case MAX_POOL_SIZE: ModelAttributes.SEQUENCER_MAX_POOL_SIZE.parseAndSetParameter(attrValue, repository, reader); break; default: throw ParseUtils.unexpectedAttribute(reader, i); } } } List<ModelNode> sequencers = new ArrayList<ModelNode>(); while (reader.hasNext() && reader.nextTag() != END_ELEMENT) { final Element element = Element.forName(reader.getLocalName()); switch (element) { case SEQUENCER: parseSequencer(reader, repositoryName, sequencers); break; default: throw 
                          ParseUtils.unexpectedElement(reader);
            }
        }
        return sequencers;
    }

    /**
     * Parses a single sequencer element. The sequencer is named by its name attribute,
     * falling back to the classname when no explicit name is given; path expressions
     * may be given via the path-expression attribute, and unknown attributes become
     * generic properties. The operation is added to the list before parsing begins.
     */
    private void parseSequencer( XMLExtendedStreamReader reader,
                                 String repositoryName,
                                 final List<ModelNode> sequencers ) throws XMLStreamException {
        final ModelNode sequencer = new ModelNode();
        sequencer.get(OP).set(ADD);
        String name = null;
        sequencers.add(sequencer);
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);
                switch (attribute) {
                    case NAME:
                        name = attrValue;
                        break;
                    case PATH_EXPRESSION:
                        ModelAttributes.PATH_EXPRESSIONS.parseAndAddParameterElement(attrValue, sequencer, reader);
                        break;
                    case CLASSNAME:
                        ModelAttributes.SEQUENCER_CLASSNAME.parseAndSetParameter(attrValue, sequencer, reader);
                        // Default the sequencer name to the classname when no name was given.
                        if (name == null) name = attrValue;
                        break;
                    case MODULE:
                        ModelAttributes.MODULE.parseAndSetParameter(attrValue, sequencer, reader);
                        break;
                    default:
                        // extra attributes are allowed to set sequencer-specific properties ...
sequencer.get(ModelKeys.PROPERTIES).add(attrName, attrValue); break; } } } while (reader.hasNext() && (reader.nextTag() != XMLStreamConstants.END_ELEMENT)) { final Element element = Element.forName(reader.getLocalName()); switch (element) { case PATH_EXPRESSION: String value = reader.getElementText(); ModelAttributes.PATH_EXPRESSIONS.parseAndAddParameterElement(value, sequencer, reader); break; default: throw ParseUtils.unexpectedElement(reader); } } sequencer.get(OP_ADDR) .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME) .add(ModelKeys.REPOSITORY, repositoryName) .add(ModelKeys.SEQUENCER, name); } private List<ModelNode> parseIndexProviders( final XMLExtendedStreamReader reader, final ModelNode parentAddress, final String repositoryName ) throws XMLStreamException { requireNoAttributes(reader); List<ModelNode> providers = new ArrayList<ModelNode>(); while (reader.hasNext() && reader.nextTag() != END_ELEMENT) { final Element element = Element.forName(reader.getLocalName()); switch (element) { case INDEX_PROVIDER: parseIndexProvider(reader, repositoryName, providers); break; default: throw ParseUtils.unexpectedElement(reader); } } return providers; } private void parseIndexProvider( XMLExtendedStreamReader reader, String repositoryName, final List<ModelNode> providers ) throws XMLStreamException { final ModelNode provider = new ModelNode(); provider.get(OP).set(ADD); String name = null; providers.add(provider); if (reader.getAttributeCount() > 0) { for (int i = 0; i < reader.getAttributeCount(); i++) { String attrName = reader.getAttributeLocalName(i); String attrValue = reader.getAttributeValue(i); Attribute attribute = Attribute.forName(attrName); switch (attribute) { case NAME: name = attrValue; break; case CLASSNAME: ModelAttributes.CLASSNAME.parseAndSetParameter(attrValue, provider, reader); if (name == null) name = attrValue; break; case MODULE: ModelAttributes.MODULE.parseAndSetParameter(attrValue, provider, reader); break; case RELATIVE_TO: 
// (continuation of the parseIndexProvider attribute loop)
                        ModelAttributes.RELATIVE_TO.parseAndSetParameter(attrValue, provider, reader);
                        break;
                    case PATH:
                        ModelAttributes.PATH.parseAndSetParameter(attrValue, provider, reader);
                        break;
                    default:
                        // extra attributes become provider-specific properties
                        provider.get(ModelKeys.PROPERTIES).add(attrName, attrValue);
                        break;
                }
            }
        }
        provider.get(OP_ADDR)
                .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME)
                .add(ModelKeys.REPOSITORY, repositoryName)
                .add(ModelKeys.INDEX_PROVIDER, name);
        // index-provider elements carry no children
        requireNoElements(reader);
    }

    /**
     * Parses the container element holding index definitions.
     *
     * @param reader the XML stream reader
     * @param parentAddress the parent address node (not read by this method)
     * @param repositoryName the name of the enclosing repository
     * @return one ADD operation per parsed index
     * @throws XMLStreamException if an unexpected element is encountered
     */
    private List<ModelNode> parseIndexes( final XMLExtendedStreamReader reader,
                                          final ModelNode parentAddress,
                                          final String repositoryName ) throws XMLStreamException {
        requireNoAttributes(reader);
        List<ModelNode> indexes = new ArrayList<ModelNode>();
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            final Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case INDEX:
                    parseIndex(reader, repositoryName, indexes);
                    break;
                default:
                    throw ParseUtils.unexpectedElement(reader);
            }
        }
        return indexes;
    }

    /**
     * Parses a single index element into an ADD operation and appends it to {@code indexes}.
     * Note: unlike the sequencer/provider/source parsers, unknown attributes here are an
     * error rather than extra properties.
     */
    private void parseIndex( XMLExtendedStreamReader reader,
                             String repositoryName,
                             final List<ModelNode> indexes ) throws XMLStreamException {
        final ModelNode index = new ModelNode();
        index.get(OP).set(ADD);
        String name = null;
        indexes.add(index);
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);
                switch (attribute) {
                    case NAME:
                        name = attrValue;
                        break;
                    case PROVIDER_NAME:
                        ModelAttributes.PROVIDER_NAME.parseAndSetParameter(attrValue, index, reader);
                        break;
                    case INDEX_KIND:
                        ModelAttributes.INDEX_KIND.parseAndSetParameter(attrValue, index, reader);
                        break;
                    case SYNCHRONOUS:
                        ModelAttributes.SYNCHRONOUS.parseAndSetParameter(attrValue, index, reader);
                        break;
                    case NODE_TYPE:
                        ModelAttributes.NODE_TYPE_NAME.parseAndSetParameter(attrValue, index, reader);
                        break;
                    case COLUMNS:
                        ModelAttributes.INDEX_COLUMNS.parseAndSetParameter(attrValue, index, reader);
                        break;
                    case WORKSPACES:
                        ModelAttributes.WORKSPACES.parseAndSetParameter(attrValue, index, reader);
                        break;
                    default:
                        throw ParseUtils.unexpectedAttribute(reader, i);
                }
            }
        }
        index.get(OP_ADDR)
             .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME)
             .add(ModelKeys.REPOSITORY, repositoryName)
             .add(ModelKeys.INDEX, name);
        // index elements carry no children
        requireNoElements(reader);
    }

    /**
     * Parses the container element holding external-source definitions.
     *
     * @param reader the XML stream reader
     * @param parentAddress the parent address node (not read by this method)
     * @param repositoryName the name of the enclosing repository
     * @return one ADD operation per parsed source
     * @throws XMLStreamException if an unexpected element is encountered
     */
    private List<ModelNode> parseExternalSources( final XMLExtendedStreamReader reader,
                                                  final ModelNode parentAddress,
                                                  final String repositoryName ) throws XMLStreamException {
        requireNoAttributes(reader);
        List<ModelNode> externalSources = new ArrayList<ModelNode>();
        while (reader.hasNext() && reader.nextTag() != END_ELEMENT) {
            final Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case SOURCE:
                    parseExternalSource(reader, repositoryName, externalSources);
                    break;
                default:
                    throw ParseUtils.unexpectedElement(reader);
            }
        }
        return externalSources;
    }

    /**
     * Parses a single external-source element into an ADD operation and appends it to
     * {@code externalSources}. Unknown attributes become source-specific PROPERTIES
     * (expression-valued ones, i.e. values starting with '$', are stored as
     * ValueExpression); projection child elements are accepted.
     */
    private void parseExternalSource( XMLExtendedStreamReader reader,
                                      String repositoryName,
                                      final List<ModelNode> externalSources ) throws XMLStreamException {
        final ModelNode externalSource = new ModelNode();
        externalSource.get(OP).set(ADD);
        String name = null;
        externalSources.add(externalSource);
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);
                switch (attribute) {
                    case NAME:
                        name = attrValue;
                        break;
                    case CLASSNAME:
                        ModelAttributes.CONNECTOR_CLASSNAME.parseAndSetParameter(attrValue, externalSource, reader);
                        // fall back to the class name when no explicit "name" attribute was given
                        if (name == null) {
                            name = attrValue;
                        }
                        break;
                    case MODULE:
                        ModelAttributes.MODULE.parseAndSetParameter(attrValue, externalSource, reader);
                        break;
                    case CACHEABLE: {
                        ModelAttributes.CACHEABLE.parseAndSetParameter(attrValue, externalSource, reader);
                        break;
                    }
                    case QUERYABLE: {
// (continuation of the parseExternalSource attribute loop)
                        ModelAttributes.QUERYABLE.parseAndSetParameter(attrValue, externalSource, reader);
                        break;
                    }
                    case READONLY: {
                        ModelAttributes.READONLY.parseAndSetParameter(attrValue, externalSource, reader);
                        break;
                    }
                    case EXPOSE_AS_WORKSPACE: {
                        ModelAttributes.EXPOSE_AS_WORKSPACE.parseAndSetParameter(attrValue, externalSource, reader);
                        break;
                    }
                    default:
                        // extra attributes are allowed to set externalSource-specific properties ...
                        if (!attrValue.startsWith("$")) {
                            externalSource.get(ModelKeys.PROPERTIES).add(attrName, attrValue);
                        } else {
                            // values starting with '$' are treated as management model expressions
                            externalSource.get(ModelKeys.PROPERTIES).add(attrName, new ValueExpression(attrValue));
                        }
                        break;
                }
            }
        }
        // Nested projection child elements.
        while (reader.hasNext() && (reader.nextTag() != XMLStreamConstants.END_ELEMENT)) {
            final Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case PROJECTION:
                    String value = reader.getElementText();
                    ModelAttributes.PROJECTIONS.parseAndAddParameterElement(value, externalSource, reader);
                    break;
                default:
                    throw ParseUtils.unexpectedElement(reader);
            }
        }
        externalSource.get(OP_ADDR)
                      .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME)
                      .add(ModelKeys.REPOSITORY, repositoryName)
                      .add(ModelKeys.SOURCE, name);
    }

    /**
     * Parses the text-extracting container element. Thread-pool attributes are written
     * onto the repository node itself; each nested text-extractor becomes its own ADD
     * operation in the returned list.
     *
     * @param reader the XML stream reader
     * @param repository the repository ModelNode that receives the pool attributes
     * @param repositoryName the name of the enclosing repository
     * @return one ADD operation per parsed text extractor
     * @throws XMLStreamException if an unexpected attribute or element is encountered
     */
    private List<ModelNode> parseTextExtracting( final XMLExtendedStreamReader reader,
                                                 final ModelNode repository,
                                                 final String repositoryName ) throws XMLStreamException {
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);
                switch (attribute) {
                    case THREAD_POOL_NAME:
                        ModelAttributes.TEXT_EXTRACTOR_THREAD_POOL_NAME.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    case MAX_POOL_SIZE:
                        ModelAttributes.TEXT_EXTRACTOR_MAX_POOL_SIZE.parseAndSetParameter(attrValue, repository, reader);
                        break;
                    default:
                        throw ParseUtils.unexpectedAttribute(reader, i);
                }
            }
        }
        List<ModelNode> extractors = new ArrayList<ModelNode>();
        while (reader.hasNext() &&
               reader.nextTag() != END_ELEMENT) {
            final Element element = Element.forName(reader.getLocalName());
            switch (element) {
                case TEXT_EXTRACTOR:
                    parseTextExtractor(reader, repositoryName, extractors);
                    break;
                default:
                    throw ParseUtils.unexpectedElement(reader);
            }
        }
        return extractors;
    }

    /**
     * Parses a single text-extractor element into an ADD operation and appends it to
     * {@code extractors}. Unknown attributes become extractor-specific PROPERTIES.
     */
    private void parseTextExtractor( XMLExtendedStreamReader reader,
                                     String repositoryName,
                                     final List<ModelNode> extractors ) throws XMLStreamException {
        final ModelNode extractor = new ModelNode();
        extractor.get(OP).set(ADD);
        String name = null;
        extractors.add(extractor);
        if (reader.getAttributeCount() > 0) {
            for (int i = 0; i < reader.getAttributeCount(); i++) {
                String attrName = reader.getAttributeLocalName(i);
                String attrValue = reader.getAttributeValue(i);
                Attribute attribute = Attribute.forName(attrName);
                switch (attribute) {
                    case NAME:
                        name = attrValue;
                        break;
                    case CLASSNAME:
                        ModelAttributes.TEXT_EXTRACTOR_CLASSNAME.parseAndSetParameter(attrValue, extractor, reader);
                        // fall back to the class name when no explicit "name" attribute was given
                        if (name == null) name = attrValue;
                        break;
                    case MODULE:
                        ModelAttributes.MODULE.parseAndSetParameter(attrValue, extractor, reader);
                        break;
                    default:
                        // extra attributes are allowed to set extractor-specific properties ...
                        extractor.get(ModelKeys.PROPERTIES).add(attrName, attrValue);
                        break;
                }
            }
        }
        extractor.get(OP_ADDR)
                 .add(SUBSYSTEM, ModeShapeExtension.SUBSYSTEM_NAME)
                 .add(ModelKeys.REPOSITORY, repositoryName)
                 .add(ModelKeys.TEXT_EXTRACTOR, name);
        // text-extractor elements carry no children
        requireNoElements(reader);
    }

    /**
     * Checks that the current element has no child elements, throwing an
     * {@link javax.xml.stream.XMLStreamException} if one is found.
     *
     * @param reader the reader
     * @throws javax.xml.stream.XMLStreamException if an error occurs
     */
    protected void requireNoElements( final XMLExtendedStreamReader reader ) throws XMLStreamException {
        if (reader.nextTag() != END_ELEMENT) {
            throw ParseUtils.unexpectedElement(reader);
        }
    }
}
/**
 * Copyright [2012-2014] PayPal Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ml.shifu.shifu.udf;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import ml.shifu.shifu.container.CaseScoreResult;
import ml.shifu.shifu.container.obj.EvalConfig;
import ml.shifu.shifu.core.ModelRunner;
import ml.shifu.shifu.fs.ShifuFileUtils;
import ml.shifu.shifu.util.CommonUtils;
import ml.shifu.shifu.util.Constants;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
import org.apache.pig.tools.pigstats.PigStatusReporter;
import org.encog.ml.BasicML;

/**
 * Pig UDF that calculates the model score(s) for each evaluation data record.
 * Emits one tuple per input record: tag, weight, avg/max/min/median score, one
 * score per model, followed by any configured score meta columns.
 */
public class EvalScoreUDF extends AbstractTrainerUDF<Tuple> {

    // prefix used to namespace the output schema field names
    private static final String SCHEMA_PREFIX = "shifu::";

    // evaluation set configuration selected by name in the constructor
    private EvalConfig evalConfig;
    // runs all loaded models against one raw-data map
    private ModelRunner modelRunner;
    // column headers of the evaluation data set
    private String[] headers;

    // negative / positive tag values from the model configuration
    private List<String> negTags;
    private List<String> posTags;

    // number of loaded models (drives the per-model score columns in the schema)
    private int modelCnt;

    /**
     * Creates the UDF, loading the eval configuration, headers and models.
     *
     * @param source data source name
     * @param pathModelConfig path to ModelConfig.json
     * @param pathColumnConfig path to ColumnConfig.json
     * @param evalSetName name of the evaluation set to score
     * @throws IOException if any configuration or model fails to load
     */
    public EvalScoreUDF(String source, String pathModelConfig, String pathColumnConfig, String evalSetName)
            throws IOException {
        super(source, pathModelConfig, pathColumnConfig);
        evalConfig = modelConfig.getEvalConfigByName(evalSetName);
        negTags = modelConfig.getNegTags();
        log.debug("Negative Tags: " + negTags);
        posTags = modelConfig.getPosTags();
        log.debug("Positive Tags: " + posTags);

        if(evalConfig.getModelsPath() != null) {
            // renew columnConfig
            this.columnConfigList = ShifuFileUtils.searchColumnConfig(evalConfig, columnConfigList);
        }

        // create model runner
        this.headers = CommonUtils.getHeaders(evalConfig.getDataSet().getHeaderPath(), evalConfig.getDataSet()
                .getHeaderDelimiter(), evalConfig.getDataSet().getSource());

        List<BasicML> models = CommonUtils
                .loadBasicModels(modelConfig, evalConfig, evalConfig.getDataSet().getSource());
        modelRunner = new ModelRunner(modelConfig, columnConfigList, this.headers, evalConfig.getDataSet()
                .getDataDelimiter(), models);
        modelCnt = models.size();
    }

    /**
     * Scores one input record. Returns null (record dropped) when the input cannot be
     * mapped to the headers or the model runner yields no result.
     */
    public Tuple exec(Tuple input) throws IOException {
        Map<String, String> rawDataMap = CommonUtils.convertDataIntoMap(input, this.headers);
        if(MapUtils.isEmpty(rawDataMap)) {
            return null;
        }

        CaseScoreResult cs = modelRunner.compute(rawDataMap);
        if(cs == null) {
            log.error("Get null result, for input: " + input.toDelimitedString("|"));
            return null;
        }

        Tuple tuple = TupleFactory.getInstance().newTuple();
        String tag = rawDataMap.get(modelConfig.getTargetColumnName(evalConfig));
        tuple.append(StringUtils.trimToEmpty(tag));

        // default weight is 1.0 when no weight column is configured
        String weight = null;
        if(StringUtils.isNotBlank(evalConfig.getDataSet().getWeightColumnName())) {
            weight = rawDataMap.get(evalConfig.getDataSet().getWeightColumnName());
        } else {
            weight = "1.0";
        }

        incrementTagCounters(tag, weight);

        tuple.append(weight);

        tuple.append(cs.getAvgScore());
        tuple.append(cs.getMaxScore());
        tuple.append(cs.getMinScore());
        tuple.append(cs.getMedianScore());

        // one column per model score
        for(Integer score: cs.getScores()) {
            tuple.append(score);
        }

        // append meta data
        List<String> metaColumns = evalConfig.getScoreMetaColumns(modelConfig);
        if(CollectionUtils.isNotEmpty(metaColumns)) {
            for(String meta: metaColumns) {
                tuple.append(rawDataMap.get(meta));
            }
        }

        return tuple;
    }

    private void incrementTagCounters(String
tag, String weight) {
        // Updates Hadoop counters for total records and (weighted) pos/neg tag counts.
        // The weight is scaled to a long because Hadoop counters are integral.
        long weightLong = (long) (Double.parseDouble(weight) * Constants.EVAL_COUNTER_WEIGHT_SCALE);
        if(isPigEnabled(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_RECORDS)) {
            PigStatusReporter.getInstance().getCounter(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_RECORDS)
                    .increment(1);
        }
        if(posTags.contains(tag)) {
            if(isPigEnabled(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_POSTAGS)) {
                PigStatusReporter.getInstance().getCounter(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_POSTAGS)
                        .increment(1);
            }
            if(isPigEnabled(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_WPOSTAGS)) {
                PigStatusReporter.getInstance().getCounter(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_WPOSTAGS)
                        .increment(weightLong);
            }
        }
        if(negTags.contains(tag)) {
            if(isPigEnabled(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_NEGTAGS)) {
                PigStatusReporter.getInstance().getCounter(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_NEGTAGS)
                        .increment(1);
            }
            if(isPigEnabled(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_WNEGTAGS)) {
                PigStatusReporter.getInstance().getCounter(Constants.SHIFU_GROUP_COUNTER, Constants.COUNTER_WNEGTAGS)
                        .increment(weightLong);
            }
        }
    }

    /**
     * Check whether is a pig environment, for example, in unit test, PigStatusReporter.getInstance() is null.
     * Also guards against a null counter for the given group/name.
     */
    private boolean isPigEnabled(String group, String counter) {
        return PigStatusReporter.getInstance() != null
                && PigStatusReporter.getInstance().getCounter(group, counter) != null;
    }

    /**
     * Output the schema for the evaluation score tuple: tag, weight, mean/max/min/median,
     * one column per model, then the score meta columns.
     * NOTE(review): meta columns are added without SCHEMA_PREFIX while all other fields
     * carry it — confirm this asymmetry is intentional.
     */
    public Schema outputSchema(Schema input) {
        try {
            Schema tupleSchema = new Schema();
            tupleSchema.add(new FieldSchema(SCHEMA_PREFIX + modelConfig.getTargetColumnName(evalConfig),
                    DataType.CHARARRAY));
            // the weight column falls back to the literal name "weight" when none is configured
            String weightName = StringUtils.isBlank(evalConfig.getDataSet().getWeightColumnName()) ? "weight"
                    : evalConfig.getDataSet().getWeightColumnName();
            tupleSchema.add(new FieldSchema(SCHEMA_PREFIX + weightName, DataType.CHARARRAY));

            tupleSchema.add(new FieldSchema(SCHEMA_PREFIX + "mean", DataType.INTEGER));
            tupleSchema.add(new FieldSchema(SCHEMA_PREFIX + "max", DataType.INTEGER));
            tupleSchema.add(new FieldSchema(SCHEMA_PREFIX + "min", DataType.INTEGER));
            tupleSchema.add(new FieldSchema(SCHEMA_PREFIX + "median", DataType.INTEGER));

            for(int i = 0; i < modelCnt; i++) {
                tupleSchema.add(new FieldSchema(SCHEMA_PREFIX + "model" + i, DataType.INTEGER));
            }

            List<String> metaColumns = evalConfig.getScoreMetaColumns(modelConfig);
            if(CollectionUtils.isNotEmpty(metaColumns)) {
                for(String columnName: metaColumns) {
                    tupleSchema.add(new FieldSchema(columnName, DataType.CHARARRAY));
                }
            }
            return new Schema(new Schema.FieldSchema("EvalScore", tupleSchema, DataType.TUPLE));
        } catch (IOException e) {
            log.error("Error in outputSchema", e);
            return null;
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs; import static org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Op.BLOCK_CHECKSUM; import static org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Status.ERROR_ACCESS_TOKEN; import static org.apache.hadoop.hdfs.protocol.DataTransferProtocol.Status.SUCCESS; import java.io.BufferedOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.OutputStream; import java.net.InetSocketAddress; import java.net.Socket; import java.net.SocketTimeoutException; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Random; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.TimeUnit; import javax.net.SocketFactory; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FSDataInputStream; import 
org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FsServerDefaults; import org.apache.hadoop.fs.FsStatus; import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Options; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException; import org.apache.hadoop.hdfs.protocol.DataTransferProtocol; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DirectoryListing; import org.apache.hadoop.hdfs.protocol.FSConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException; import org.apache.hadoop.hdfs.protocol.UnresolvedPathException; import org.apache.hadoop.hdfs.server.common.HdfsConstants; import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.MD5Hash; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.retry.RetryPolicies; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.RetryProxy; import org.apache.hadoop.ipc.Client; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.NetUtils; 
import org.apache.hadoop.net.NodeBase; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.SecretManager.InvalidToken; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.util.Daemon; import org.apache.hadoop.util.Progressable; import org.apache.hadoop.util.StringUtils; /******************************************************** * DFSClient can connect to a Hadoop Filesystem and * perform basic file tasks. It uses the ClientProtocol * to communicate with a NameNode daemon, and connects * directly to DataNodes to read/write block data. * * Hadoop DFS users should obtain an instance of * DistributedFileSystem, which uses DFSClient to handle * filesystem tasks. * ********************************************************/ @InterfaceAudience.Private public class DFSClient implements FSConstants, java.io.Closeable { public static final Log LOG = LogFactory.getLog(DFSClient.class); public static final long SERVER_DEFAULTS_VALIDITY_PERIOD = 60 * 60 * 1000L; // 1 hour public static final int MAX_BLOCK_ACQUIRE_FAILURES = 3; static final int TCP_WINDOW_SIZE = 128 * 1024; // 128 KB final ClientProtocol namenode; private final ClientProtocol rpcNamenode; final UserGroupInformation ugi; volatile boolean clientRunning = true; private volatile FsServerDefaults serverDefaults; private volatile long serverDefaultsLastUpdate; Random r = new Random(); final String clientName; final LeaseChecker leasechecker = new LeaseChecker(); Configuration conf; long defaultBlockSize; private short defaultReplication; SocketFactory socketFactory; int socketTimeout; final int writePacketSize; final FileSystem.Statistics stats; final int hdfsTimeout; // timeout value for a DFS operation. 
/** * The locking hierarchy is to first acquire lock on DFSClient object, followed by * lock on leasechecker, followed by lock on an individual DFSOutputStream. */ public static ClientProtocol createNamenode(Configuration conf) throws IOException { return createNamenode(NameNode.getAddress(conf), conf); } public static ClientProtocol createNamenode( InetSocketAddress nameNodeAddr, Configuration conf) throws IOException { return createNamenode(createRPCNamenode(nameNodeAddr, conf, UserGroupInformation.getCurrentUser())); } private static ClientProtocol createRPCNamenode(InetSocketAddress nameNodeAddr, Configuration conf, UserGroupInformation ugi) throws IOException { return (ClientProtocol)RPC.getProxy(ClientProtocol.class, ClientProtocol.versionID, nameNodeAddr, ugi, conf, NetUtils.getSocketFactory(conf, ClientProtocol.class)); } private static ClientProtocol createNamenode(ClientProtocol rpcNamenode) throws IOException { RetryPolicy createPolicy = RetryPolicies.retryUpToMaximumCountWithFixedSleep( 5, LEASE_SOFTLIMIT_PERIOD, TimeUnit.MILLISECONDS); Map<Class<? extends Exception>,RetryPolicy> remoteExceptionToPolicyMap = new HashMap<Class<? extends Exception>, RetryPolicy>(); remoteExceptionToPolicyMap.put(AlreadyBeingCreatedException.class, createPolicy); Map<Class<? extends Exception>,RetryPolicy> exceptionToPolicyMap = new HashMap<Class<? 
extends Exception>, RetryPolicy>(); exceptionToPolicyMap.put(RemoteException.class, RetryPolicies.retryByRemoteException( RetryPolicies.TRY_ONCE_THEN_FAIL, remoteExceptionToPolicyMap)); RetryPolicy methodPolicy = RetryPolicies.retryByException( RetryPolicies.TRY_ONCE_THEN_FAIL, exceptionToPolicyMap); Map<String,RetryPolicy> methodNameToPolicyMap = new HashMap<String,RetryPolicy>(); methodNameToPolicyMap.put("create", methodPolicy); return (ClientProtocol) RetryProxy.create(ClientProtocol.class, rpcNamenode, methodNameToPolicyMap); } static ClientDatanodeProtocol createClientDatanodeProtocolProxy ( DatanodeID datanodeid, Configuration conf) throws IOException { InetSocketAddress addr = NetUtils.createSocketAddr( datanodeid.getHost() + ":" + datanodeid.getIpcPort()); if (ClientDatanodeProtocol.LOG.isDebugEnabled()) { ClientDatanodeProtocol.LOG.info("ClientDatanodeProtocol addr=" + addr); } return (ClientDatanodeProtocol)RPC.getProxy(ClientDatanodeProtocol.class, ClientDatanodeProtocol.versionID, addr, conf); } /** * Same as this(NameNode.getAddress(conf), conf); * @see #DFSClient(InetSocketAddress, Configuration) * @deprecated Deprecated at 0.21 */ @Deprecated public DFSClient(Configuration conf) throws IOException { this(NameNode.getAddress(conf), conf); } /** * Same as this(nameNodeAddr, conf, null); * @see #DFSClient(InetSocketAddress, Configuration, org.apache.hadoop.fs.FileSystem.Statistics) */ public DFSClient(InetSocketAddress nameNodeAddr, Configuration conf ) throws IOException { this(nameNodeAddr, conf, null); } /** * Same as this(nameNodeAddr, null, conf, stats); * @see #DFSClient(InetSocketAddress, ClientProtocol, Configuration, org.apache.hadoop.fs.FileSystem.Statistics) */ public DFSClient(InetSocketAddress nameNodeAddr, Configuration conf, FileSystem.Statistics stats) throws IOException { this(nameNodeAddr, null, conf, stats); } /** * Create a new DFSClient connected to the given nameNodeAddr or rpcNamenode. 
* Exactly one of nameNodeAddr or rpcNamenode must be null. */ DFSClient(InetSocketAddress nameNodeAddr, ClientProtocol rpcNamenode, Configuration conf, FileSystem.Statistics stats) throws IOException { this.conf = conf; this.stats = stats; this.socketTimeout = conf.getInt(DFSConfigKeys.DFS_CLIENT_SOCKET_TIMEOUT_KEY, HdfsConstants.READ_TIMEOUT); this.socketFactory = NetUtils.getSocketFactory(conf, ClientProtocol.class); // dfs.write.packet.size is an internal config variable this.writePacketSize = conf.getInt(DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_KEY, DFSConfigKeys.DFS_CLIENT_WRITE_PACKET_SIZE_DEFAULT); // The hdfsTimeout is currently the same as the ipc timeout this.hdfsTimeout = Client.getTimeout(conf); this.ugi = UserGroupInformation.getCurrentUser(); String taskId = conf.get("mapred.task.id"); if (taskId != null) { this.clientName = "DFSClient_" + taskId; } else { this.clientName = "DFSClient_" + r.nextInt(); } defaultBlockSize = conf.getLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, DEFAULT_BLOCK_SIZE); defaultReplication = (short) conf.getInt("dfs.replication", 3); if (nameNodeAddr != null && rpcNamenode == null) { this.rpcNamenode = createRPCNamenode(nameNodeAddr, conf, ugi); this.namenode = createNamenode(this.rpcNamenode); } else if (nameNodeAddr == null && rpcNamenode != null) { //This case is used for testing. this.namenode = this.rpcNamenode = rpcNamenode; } else { throw new IllegalArgumentException( "Expecting exactly one of nameNodeAddr and rpcNamenode being null: " + "nameNodeAddr=" + nameNodeAddr + ", rpcNamenode=" + rpcNamenode); } } /** * Return the number of times the client should go back to the namenode * to retrieve block locations when reading. */ int getMaxBlockAcquireFailures() { return conf.getInt("dfs.client.max.block.acquire.failures", MAX_BLOCK_ACQUIRE_FAILURES); } /** * Return the timeout that clients should use when writing to datanodes. * @param numNodes the number of nodes in the pipeline. 
*/ int getDatanodeWriteTimeout(int numNodes) { int confTime = conf.getInt("dfs.datanode.socket.write.timeout", HdfsConstants.WRITE_TIMEOUT); return (confTime > 0) ? (confTime + HdfsConstants.WRITE_TIMEOUT_EXTENSION * numNodes) : 0; } int getDatanodeReadTimeout(int numNodes) { return socketTimeout > 0 ? (HdfsConstants.READ_TIMEOUT_EXTENSION * numNodes + socketTimeout) : 0; } void checkOpen() throws IOException { if (!clientRunning) { IOException result = new IOException("Filesystem closed"); throw result; } } /** * Close the file system, abandoning all of the leases and files being * created and close connections to the namenode. */ public synchronized void close() throws IOException { if(clientRunning) { leasechecker.close(); clientRunning = false; try { leasechecker.interruptAndJoin(); } catch (InterruptedException ie) { } // close connections to the namenode RPC.stopProxy(rpcNamenode); } } /** * Get the default block size for this cluster * @return the default block size in bytes */ public long getDefaultBlockSize() { return defaultBlockSize; } public long getBlockSize(String f) throws IOException { try { return namenode.getPreferredBlockSize(f); } catch (IOException ie) { LOG.warn("Problem getting block size: " + StringUtils.stringifyException(ie)); throw ie; } } /** * Get server default values for a number of configuration params. 
 */
  public FsServerDefaults getServerDefaults() throws IOException {
    long now = System.currentTimeMillis();
    // re-fetch from the namenode only after the cached copy expires
    if (now - serverDefaultsLastUpdate > SERVER_DEFAULTS_VALIDITY_PERIOD) {
      serverDefaults = namenode.getServerDefaults();
      serverDefaultsLastUpdate = now;
    }
    return serverDefaults;
  }

  /** Requests a new delegation token for the given renewer from the namenode. */
  public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
      throws IOException {
    return namenode.getDelegationToken(renewer);
  }

  /** Renews a delegation token, unwrapping token/access failures from the RPC layer. */
  public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
      throws InvalidToken, IOException {
    try {
      return namenode.renewDelegationToken(token);
    } catch (RemoteException re) {
      throw re.unwrapRemoteException(InvalidToken.class,
                                     AccessControlException.class);
    }
  }

  /** Cancels a delegation token, unwrapping token/access failures from the RPC layer. */
  public void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
      throws InvalidToken, IOException {
    try {
      namenode.cancelDelegationToken(token);
    } catch (RemoteException re) {
      throw re.unwrapRemoteException(InvalidToken.class,
                                     AccessControlException.class);
    }
  }

  /**
   * Report corrupt blocks that were discovered by the client.
   */
  public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
    namenode.reportBadBlocks(blocks);
  }

  public short getDefaultReplication() {
    return defaultReplication;
  }

  /** Static helper that fetches block locations and unwraps common remote exceptions. */
  static LocatedBlocks callGetBlockLocations(ClientProtocol namenode,
      String src, long start, long length)
      throws IOException, UnresolvedLinkException {
    try {
      return namenode.getBlockLocations(src, start, length);
    } catch(RemoteException re) {
      throw re.unwrapRemoteException(AccessControlException.class,
                                     FileNotFoundException.class,
                                     UnresolvedPathException.class);
    }
  }

  /**
   * Get block location info about file
   *
   * getBlockLocations() returns a list of hostnames that store
   * data for a specific file region.  It returns a set of hostnames
   * for every block within the indicated region.
   *
   * This function is very useful when writing code that considers
   * data-placement when performing operations.  For example, the
   * MapReduce system tries to schedule tasks on the same machines
   * as the data-block the task processes.
   */
  public BlockLocation[] getBlockLocations(String src, long start,
        long length) throws IOException, UnresolvedLinkException {
    LocatedBlocks blocks = callGetBlockLocations(namenode, src, start, length);
    if (blocks == null) {
      return new BlockLocation[0];
    }
    int nrBlocks = blocks.locatedBlockCount();
    BlockLocation[] blkLocations = new BlockLocation[nrBlocks];
    int idx = 0;
    for (LocatedBlock blk : blocks.getLocatedBlocks()) {
      assert idx < nrBlocks : "Incorrect index";
      DatanodeInfo[] locations = blk.getLocations();
      String[] hosts = new String[locations.length];
      String[] names = new String[locations.length];
      String[] racks = new String[locations.length];
      for (int hCnt = 0; hCnt < locations.length; hCnt++) {
        hosts[hCnt] = locations[hCnt].getHostName();
        names[hCnt] = locations[hCnt].getName();
        // rack string is the network location joined with the node name
        NodeBase node = new NodeBase(names[hCnt],
                                     locations[hCnt].getNetworkLocation());
        racks[hCnt] = node.toString();
      }
      blkLocations[idx] = new BlockLocation(names, hosts, racks,
                                            blk.getStartOffset(),
                                            blk.getBlockSize());
      idx++;
    }
    return blkLocations;
  }

  /** Opens {@code src} with the configured io.file.buffer.size and checksum verification on. */
  public DFSInputStream open(String src) throws IOException, UnresolvedLinkException {
    return open(src, conf.getInt("io.file.buffer.size", 4096), true, null);
  }

  /**
   * Create an input stream that obtains a nodelist from the
   * namenode, and then reads from all the right places.  Creates
   * inner subclass of InputStream that does the right out-of-band
   * work.
   * @deprecated Use {@link #open(String, int, boolean)} instead.
   */
  @Deprecated
  public DFSInputStream open(String src, int buffersize, boolean verifyChecksum,
                             FileSystem.Statistics stats)
      throws IOException, UnresolvedLinkException {
    // the stats parameter is unused; delegates to the 3-arg overload
    return open(src, buffersize, verifyChecksum);
  }

  /**
   * Create an input stream that obtains a nodelist from the
   * namenode, and then reads from all the right places.  Creates
   * inner subclass of InputStream that does the right out-of-band
   * work.
*/ public DFSInputStream open(String src, int buffersize, boolean verifyChecksum) throws IOException, UnresolvedLinkException { checkOpen(); // Get block info from namenode return new DFSInputStream(this, src, buffersize, verifyChecksum); } /** * Create a new dfs file and return an output stream for writing into it. * * @param src stream name * @param overwrite do not check for file existence if true * @return output stream * @throws UnresolvedLinkException if a symlink is encountered in src. * @throws IOException */ public OutputStream create(String src, boolean overwrite) throws IOException, UnresolvedLinkException { return create(src, overwrite, defaultReplication, defaultBlockSize, null); } /** * Create a new dfs file and return an output stream for writing into it * with write-progress reporting. * * @param src stream name * @param overwrite do not check for file existence if true * @return output stream * @throws UnresolvedLinkException if a symlink is encountered in src. * @throws IOException */ public OutputStream create(String src, boolean overwrite, Progressable progress) throws IOException, UnresolvedLinkException { return create(src, overwrite, defaultReplication, defaultBlockSize, null); } /** * Create a new dfs file with the specified block replication * and return an output stream for writing into the file. * * @param src stream name * @param overwrite do not check for file existence if true * @param replication block replication * @return output stream * @throws UnresolvedLinkException if a symlink is encountered in src. 
 * @throws IOException
 */
public OutputStream create(String src, boolean overwrite, short replication,
    long blockSize) throws IOException, UnresolvedLinkException {
  // No progress reporting for this overload.
  return create(src, overwrite, replication, blockSize, null);
}

/**
 * Get the namenode associated with this DFSClient object
 * @return the namenode associated with this DFSClient object
 */
public ClientProtocol getNamenode() {
  return namenode;
}

/**
 * Create a new dfs file with the specified block replication
 * with write-progress reporting and return an output stream for writing
 * into the file.
 *
 * @param src stream name
 * @param overwrite do not check for file existence if true
 * @param replication block replication
 * @param progress callback for reporting write-progress; may be null
 * @return output stream
 * @throws UnresolvedLinkException if a symlink is encountered in src.
 * @throws IOException
 */
public OutputStream create(String src, boolean overwrite, short replication,
    long blockSize, Progressable progress)
    throws IOException, UnresolvedLinkException {
  // Use the configured I/O buffer size (default 4 KB).
  return create(src, overwrite, replication, blockSize, progress,
      conf.getInt("io.file.buffer.size", 4096));
}

/**
 * Call
 * {@link #create(String,FsPermission,EnumSet,short,long,Progressable,int)}
 * with default permission.
 * @see FsPermission#getDefault()
 */
public OutputStream create(String src, boolean overwrite, short replication,
    long blockSize, Progressable progress, int buffersize)
    throws IOException, UnresolvedLinkException {
  // OVERWRITE means create-or-truncate; CREATE fails if the file exists.
  return create(src, FsPermission.getDefault(),
      overwrite ? EnumSet.of(CreateFlag.OVERWRITE) : EnumSet.of(CreateFlag.CREATE),
      replication, blockSize, progress, buffersize);
}

/**
 * Call
 * {@link #create(String,FsPermission,EnumSet,boolean,short,long,Progressable,int)}
 * with createParent set to true.
*/
public OutputStream create(String src, FsPermission permission,
    EnumSet<CreateFlag> flag, short replication, long blockSize,
    Progressable progress, int buffersize)
    throws IOException, UnresolvedLinkException {
  return create(src, permission, flag, true, replication, blockSize,
      progress, buffersize);
}

/**
 * Create a new dfs file with the specified block replication
 * with write-progress reporting and return an output stream for writing
 * into the file.
 *
 * @param src stream name
 * @param permission The permission of the directory being created.
 * If permission == null, use {@link FsPermission#getDefault()}.
 * @param flag do not check for file existence if true
 * @param createParent create missing parent directory if true
 * @param replication block replication
 * @return output stream
 * @throws IOException
 * @throws UnresolvedLinkException if src contains a symlink.
 * @see ClientProtocol#create(String, FsPermission, String, EnumSetWritable, boolean, short, long)
 */
public OutputStream create(String src, FsPermission permission,
    EnumSet<CreateFlag> flag, boolean createParent, short replication,
    long blockSize, Progressable progress, int buffersize)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  if (permission == null) {
    permission = FsPermission.getDefault();
  }
  // Apply the client-side umask before sending the permission to the namenode.
  FsPermission masked = permission.applyUMask(FsPermission.getUMask(conf));
  LOG.debug(src + ": masked=" + masked);
  OutputStream result = new DFSOutputStream(this, src, masked, flag,
      createParent, replication, blockSize, progress, buffersize,
      conf.getInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY,
                  DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT));
  // Register the open file so the lease checker keeps its lease renewed.
  leasechecker.put(src, result);
  return result;
}

/**
 * Same as {{@link #create(String, FsPermission, EnumSet, short, long,
 * Progressable, int)} except that the permission
 * is absolute (ie has already been masked with umask.
*/
public OutputStream primitiveCreate(String src, FsPermission absPermission,
    EnumSet<CreateFlag> flag, boolean createParent, short replication,
    long blockSize, Progressable progress, int buffersize,
    int bytesPerChecksum)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  // absPermission is used as-is; no umask is applied here.
  OutputStream result = new DFSOutputStream(this, src, absPermission, flag,
      createParent, replication, blockSize, progress, buffersize,
      bytesPerChecksum);
  // Register the open file so the lease checker keeps its lease renewed.
  leasechecker.put(src, result);
  return result;
}

/**
 * Creates a symbolic link.
 *
 * @see ClientProtocol#createSymlink(String, String,FsPermission, boolean)
 */
public void createSymlink(String target, String link, boolean createParent)
    throws IOException, UnresolvedLinkException {
  try {
    // Links always get the default permission masked by the client umask.
    FsPermission dirPerm =
        FsPermission.getDefault().applyUMask(FsPermission.getUMask(conf));
    namenode.createSymlink(target, link, dirPerm, createParent);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   NSQuotaExceededException.class,
                                   DSQuotaExceededException.class,
                                   FileAlreadyExistsException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Resolve the *first* symlink, if any, in the path.
 *
 * @see ClientProtocol#getLinkTarget(String)
 */
public String getLinkTarget(String path) throws IOException {
  checkOpen();
  try {
    return namenode.getLinkTarget(path);
  } catch (RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   FileNotFoundException.class);
  }
}

/**
 * Append to an existing HDFS file.
 *
 * @param src file name
 * @param buffersize buffer size
 * @param progress for reporting write-progress
 * @return an output stream for writing into the file
 * @throws IOException
 * @throws UnresolvedLinkException if the path contains a symlink.
 * @see ClientProtocol#append(String, String)
 */
OutputStream append(String src, int buffersize, Progressable progress)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  HdfsFileStatus stat = null;
  LocatedBlock lastBlock = null;
  try {
    stat = getFileInfo(src);
    // The namenode hands back the (possibly partial) last block so the
    // writer can resume it.
    lastBlock = namenode.append(src, clientName);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(FileNotFoundException.class,
                                   AccessControlException.class,
                                   NSQuotaExceededException.class,
                                   DSQuotaExceededException.class,
                                   UnresolvedPathException.class);
  }
  OutputStream result = new DFSOutputStream(this, src, buffersize, progress,
      lastBlock, stat,
      conf.getInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY,
                  DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_DEFAULT));
  // Register the open file so the lease checker keeps its lease renewed.
  leasechecker.put(src, result);
  return result;
}

/**
 * Set replication for an existing file.
 *
 * @see ClientProtocol#setReplication(String, short)
 * @param replication
 * @throws IOException
 * @return true if successful or false if file does not exist
 */
public boolean setReplication(String src, short replication)
    throws IOException, UnresolvedLinkException {
  try {
    return namenode.setReplication(src, replication);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   NSQuotaExceededException.class,
                                   DSQuotaExceededException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Rename file or directory.
 * See {@link ClientProtocol#rename(String, String)}.
 * @deprecated Use {@link #rename(String, String, Options.Rename...)} instead.
 */
@Deprecated
public boolean rename(String src, String dst)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    return namenode.rename(src, dst);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   NSQuotaExceededException.class,
                                   DSQuotaExceededException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Move blocks from src to trg and delete src
 * See {@link ClientProtocol#concat(String, String [])}.
*/
public void concat(String trg, String [] srcs)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    namenode.concat(trg, srcs);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   NSQuotaExceededException.class,
                                   DSQuotaExceededException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Rename file or directory.
 * See {@link ClientProtocol#rename(String, String, Options.Rename...)}
 */
public void rename(String src, String dst, Options.Rename... options)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    namenode.rename(src, dst, options);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   NSQuotaExceededException.class,
                                   DSQuotaExceededException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Delete file or directory (always recursively).
 * See {@link ClientProtocol#delete(String)}.
 * @deprecated Use {@link #delete(String, boolean)} instead.
 */
@Deprecated
public boolean delete(String src)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  // NOTE(review): unlike delete(String, boolean), this variant does not
  // unwrap RemoteException, so callers see the raw remote exception type.
  return namenode.delete(src, true);
}

/**
 * delete file or directory.
 * delete contents of the directory if non empty and recursive
 * set to true
 */
public boolean delete(String src, boolean recursive)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    return namenode.delete(src, recursive);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   UnresolvedPathException.class);
  }
}

/** Implemented using getFileInfo(src) */
public boolean exists(String src) throws IOException {
  checkOpen();
  return getFileInfo(src) != null;
}

/**
 * Get a partial listing of the indicated directory
 *
 * Recommend to use HdfsFileStatus.EMPTY_NAME as startAfter
 * if the application wants to fetch a listing starting from
 * the first entry in the directory
 *
 * @param src the directory name
 * @param startAfter the name to start listing after encoded in java UTF8
 * @return a partial listing starting after startAfter
 */
public DirectoryListing listPaths(String src, byte[] startAfter)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    return namenode.getListing(src, startAfter);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Get the file info for a specific file or directory; returns null if the
 * path does not exist (symlinks are followed).
 */
public HdfsFileStatus getFileInfo(String src)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    return namenode.getFileInfo(src);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Get the file info for a specific file or directory. If src
 * refers to a symlink then the FileStatus of the link is returned.
 * @param src path to a file or directory.
 * @throws IOException
 * @throws UnresolvedLinkException if the path contains symlinks
 * @return FileStatus describing src.
*/
public HdfsFileStatus getFileLinkInfo(String src)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    return namenode.getFileLinkInfo(src);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Get the checksum of a file.
 * @param src The file path
 * @return The checksum
 * @see DistributedFileSystem#getFileChecksum(Path)
 */
public MD5MD5CRC32FileChecksum getFileChecksum(String src) throws IOException {
  checkOpen();
  return getFileChecksum(src, namenode, socketFactory, socketTimeout);
}

/**
 * Get the checksum of a file by querying each block's datanodes for the
 * block-level MD5 and digesting the concatenated per-block MD5s.
 * @param src The file path
 * @return The checksum
 */
public static MD5MD5CRC32FileChecksum getFileChecksum(String src,
    ClientProtocol namenode, SocketFactory socketFactory, int socketTimeout
    ) throws IOException {
  //get all block locations
  List<LocatedBlock> locatedblocks
      = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE).getLocatedBlocks();
  final DataOutputBuffer md5out = new DataOutputBuffer();
  int bytesPerCRC = 0;
  long crcPerBlock = 0;
  boolean refetchBlocks = false;
  int lastRetriedIndex = -1;

  //get block checksum for each block
  for(int i = 0; i < locatedblocks.size(); i++) {
    if (refetchBlocks) {
      // refetch to get fresh tokens
      locatedblocks = callGetBlockLocations(namenode, src, 0, Long.MAX_VALUE)
          .getLocatedBlocks();
      refetchBlocks = false;
    }
    LocatedBlock lb = locatedblocks.get(i);
    final Block block = lb.getBlock();
    final DatanodeInfo[] datanodes = lb.getLocations();

    //try each datanode location of the block
    final int timeout = 3000 * datanodes.length + socketTimeout;
    boolean done = false;
    for(int j = 0; !done && j < datanodes.length; j++) {
      Socket sock = null;
      DataOutputStream out = null;
      DataInputStream in = null;
      try {
        //connect to a datanode
        sock = socketFactory.createSocket();
        NetUtils.connect(sock,
            NetUtils.createSocketAddr(datanodes[j].getName()), timeout);
        sock.setSoTimeout(timeout);

        out = new DataOutputStream(
            new BufferedOutputStream(NetUtils.getOutputStream(sock),
                                     DataNode.SMALL_BUFFER_SIZE));
        in = new DataInputStream(NetUtils.getInputStream(sock));

        if (LOG.isDebugEnabled()) {
          LOG.debug("write to " + datanodes[j].getName() + ": "
              + BLOCK_CHECKSUM + ", block=" + block);
        }
        // get block MD5
        DataTransferProtocol.Sender.opBlockChecksum(out, block.getBlockId(),
            block.getGenerationStamp(), lb.getAccessToken());

        final DataTransferProtocol.Status reply
            = DataTransferProtocol.Status.read(in);
        if (reply != SUCCESS) {
          // On a stale access token, retry this block exactly once with
          // freshly fetched locations; otherwise fail the request.
          if (reply == ERROR_ACCESS_TOKEN && i > lastRetriedIndex) {
            if (LOG.isDebugEnabled()) {
              LOG.debug("Got access token error in response to OP_BLOCK_CHECKSUM "
                  + "for file " + src + " for block " + block
                  + " from datanode " + datanodes[j].getName()
                  + ". Will retry the block once.");
            }
            lastRetriedIndex = i;
            done = true; // actually it's not done; but we'll retry
            i--; // repeat at i-th block
            refetchBlocks = true;
            break;
          } else {
            throw new IOException("Bad response " + reply + " for block "
                + block + " from datanode " + datanodes[j].getName());
          }
        }

        //read byte-per-checksum
        final int bpc = in.readInt();
        if (i == 0) { //first block
          bytesPerCRC = bpc;
        }
        else if (bpc != bytesPerCRC) {
          // All blocks of a file must agree on the checksum chunk size.
          throw new IOException("Byte-per-checksum not matched: bpc=" + bpc
              + " but bytesPerCRC=" + bytesPerCRC);
        }

        //read crc-per-block
        final long cpb = in.readLong();
        if (locatedblocks.size() > 1 && i == 0) {
          crcPerBlock = cpb;
        }

        //read md5
        final MD5Hash md5 = MD5Hash.read(in);
        md5.write(md5out);

        done = true;

        if (LOG.isDebugEnabled()) {
          if (i == 0) {
            LOG.debug("set bytesPerCRC=" + bytesPerCRC
                + ", crcPerBlock=" + crcPerBlock);
          }
          LOG.debug("got reply from " + datanodes[j].getName()
              + ": md5=" + md5);
        }
      } catch (IOException ie) {
        // Best-effort per datanode: log and try the next replica.
        LOG.warn("src=" + src + ", datanodes[" + j + "].getName()="
            + datanodes[j].getName(), ie);
      } finally {
        IOUtils.closeStream(in);
        IOUtils.closeStream(out);
        IOUtils.closeSocket(sock);
      }
    }

    if (!done) {
      throw new IOException("Fail to get block MD5 for " + block);
    }
  }

  //compute file MD5
  final MD5Hash fileMD5 = MD5Hash.digest(md5out.getData());
  return new MD5MD5CRC32FileChecksum(bytesPerCRC, crcPerBlock, fileMD5);
}

/**
 * Set permissions to a file or directory.
 * @param src path name.
 * @param permission
 * @throws <code>FileNotFoundException</code> if file does not exist.
 * @throws UnresolvedLinkException if the path contains a symlink.
 */
public void setPermission(String src, FsPermission permission)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    namenode.setPermission(src, permission);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   FileNotFoundException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Set file or directory owner.
 * @param src path name.
 * @param username user id.
 * @param groupname user group.
 * @throws <code>FileNotFoundException</code> if file does not exist.
 * @throws UnresolvedLinkException if the path contains a symlink.
 */
public void setOwner(String src, String username, String groupname)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    namenode.setOwner(src, username, groupname);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   FileNotFoundException.class,
                                   UnresolvedPathException.class);
  }
}

/** Filesystem capacity/used/remaining as reported by the namenode stats. */
public FsStatus getDiskStatus() throws IOException {
  long rawNums[] = namenode.getStats();
  return new FsStatus(rawNums[0], rawNums[1], rawNums[2]);
}

/**
 * Returns count of blocks with no good replicas left. Normally should be
 * zero.
 * @throws IOException
 */
public long getMissingBlocksCount() throws IOException {
  return namenode.getStats()[ClientProtocol.GET_STATS_MISSING_BLOCKS_IDX];
}

/**
 * Returns count of blocks with one or more replica missing.
 * @throws IOException
 */
public long getUnderReplicatedBlocksCount() throws IOException {
  return namenode.getStats()[ClientProtocol.GET_STATS_UNDER_REPLICATED_IDX];
}

/**
 * Returns count of blocks with at least one replica marked corrupt.
 * @throws IOException
 */
public long getCorruptBlocksCount() throws IOException {
  return namenode.getStats()[ClientProtocol.GET_STATS_CORRUPT_BLOCKS_IDX];
}

/** Get a report on datanodes of the requested type from the namenode. */
public DatanodeInfo[] datanodeReport(DatanodeReportType type)
    throws IOException {
  return namenode.getDatanodeReport(type);
}

/**
 * Enter, leave or get safe mode.
 * See {@link ClientProtocol#setSafeMode(FSConstants.SafeModeAction)}
 * for more details.
 *
 * @see ClientProtocol#setSafeMode(FSConstants.SafeModeAction)
 */
public boolean setSafeMode(SafeModeAction action) throws IOException {
  return namenode.setSafeMode(action);
}

/**
 * Save namespace image.
 * See {@link ClientProtocol#saveNamespace()}
 * for more details.
 *
 * @see ClientProtocol#saveNamespace()
 */
void saveNamespace() throws AccessControlException, IOException {
  try {
    namenode.saveNamespace();
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class);
  }
}

/**
 * enable/disable restore failed storage.
 * See {@link ClientProtocol#restoreFailedStorage(String arg)}
 * for more details.
 */
boolean restoreFailedStorage(String arg) throws AccessControlException {
  return namenode.restoreFailedStorage(arg);
}

/**
 * Refresh the hosts and exclude files. (Rereads them.)
 * See {@link ClientProtocol#refreshNodes()}
 * for more details.
 *
 * @see ClientProtocol#refreshNodes()
 */
public void refreshNodes() throws IOException {
  namenode.refreshNodes();
}

/**
 * Dumps DFS data structures into specified file.
 * See {@link ClientProtocol#metaSave(String)}
 * for more details.
 *
 * @see ClientProtocol#metaSave(String)
 */
public void metaSave(String pathname) throws IOException {
  namenode.metaSave(pathname);
}

/**
 * @see ClientProtocol#finalizeUpgrade()
 */
public void finalizeUpgrade() throws IOException {
  namenode.finalizeUpgrade();
}

/**
 * @see ClientProtocol#distributedUpgradeProgress(FSConstants.UpgradeAction)
 */
public UpgradeStatusReport distributedUpgradeProgress(UpgradeAction action
    ) throws IOException {
  return namenode.distributedUpgradeProgress(action);
}

/**
 * Create a directory with default permission, creating missing parents.
 */
@Deprecated
public boolean mkdirs(String src) throws IOException {
  return mkdirs(src, null, true);
}

/**
 * Create a directory (or hierarchy of directories) with the given
 * name and permission.
 *
 * @param src The path of the directory being created
 * @param permission The permission of the directory being created.
 * If permission == null, use {@link FsPermission#getDefault()}.
 * @param createParent create missing parent directory if true
 * @return True if the operation success.
 * @throws UnresolvedLinkException if the path contains a symlink.
 * @see ClientProtocol#mkdirs(String, FsPermission, boolean)
 */
public boolean mkdirs(String src, FsPermission permission,
    boolean createParent) throws IOException, UnresolvedLinkException {
  checkOpen();
  if (permission == null) {
    permission = FsPermission.getDefault();
  }
  // Apply the client-side umask before sending to the namenode.
  FsPermission masked = permission.applyUMask(FsPermission.getUMask(conf));
  LOG.debug(src + ": masked=" + masked);
  try {
    return namenode.mkdirs(src, masked, createParent);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   NSQuotaExceededException.class,
                                   DSQuotaExceededException.class,
                                   FileNotFoundException.class,
                                   FileAlreadyExistsException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Same {{@link #mkdirs(String, FsPermission, boolean)} except
 * that the permissions has already been masked against umask.
 * @throws UnresolvedLinkException if the path contains a symlink.
*/
public boolean primitiveMkdir(String src, FsPermission absPermission)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  if (absPermission == null) {
    // No permission supplied: fall back to default masked by the umask.
    absPermission =
        FsPermission.getDefault().applyUMask(FsPermission.getUMask(conf));
  }
  LOG.debug(src + ": masked=" + absPermission);
  try {
    return namenode.mkdirs(src, absPermission, true);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   NSQuotaExceededException.class,
                                   DSQuotaExceededException.class,
                                   UnresolvedPathException.class);
  }
}

/** Fetch the content summary (file/dir/quota counts) for a path. */
ContentSummary getContentSummary(String src) throws IOException {
  try {
    return namenode.getContentSummary(src);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   FileNotFoundException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * Sets or resets quotas for a directory.
 * @see org.apache.hadoop.hdfs.protocol.ClientProtocol#setQuota(String, long, long)
 */
void setQuota(String src, long namespaceQuota, long diskspaceQuota)
    throws IOException, UnresolvedLinkException {
  // sanity check: quotas must be positive or one of the sentinel values.
  if ((namespaceQuota <= 0 && namespaceQuota != FSConstants.QUOTA_DONT_SET &&
       namespaceQuota != FSConstants.QUOTA_RESET) ||
      (diskspaceQuota <= 0 && diskspaceQuota != FSConstants.QUOTA_DONT_SET &&
       diskspaceQuota != FSConstants.QUOTA_RESET)) {
    throw new IllegalArgumentException("Invalid values for quota : " +
                                       namespaceQuota + " and " +
                                       diskspaceQuota);
  }
  try {
    namenode.setQuota(src, namespaceQuota, diskspaceQuota);
  } catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   FileNotFoundException.class,
                                   NSQuotaExceededException.class,
                                   DSQuotaExceededException.class,
                                   UnresolvedPathException.class);
  }
}

/**
 * set the modification and access time of a file
 * @throws FileNotFoundException if the path is not a file
 */
public void setTimes(String src, long mtime, long atime)
    throws IOException, UnresolvedLinkException {
  checkOpen();
  try {
    namenode.setTimes(src, mtime, atime);
  }
  catch(RemoteException re) {
    throw re.unwrapRemoteException(AccessControlException.class,
                                   FileNotFoundException.class,
                                   UnresolvedPathException.class);
  }
}

// True once the lease-renewal daemon has been lazily started.
boolean isLeaseCheckerStarted() {
  return leasechecker.daemon != null;
}

/** Lease management*/
class LeaseChecker implements Runnable {
  /** A map from src -> DFSOutputStream of files that are currently being
   * written by this client. */
  private final SortedMap<String, OutputStream> pendingCreates
      = new TreeMap<String, OutputStream>();
  // Renewal thread; lazily created on the first put().
  private Daemon daemon = null;

  synchronized void put(String src, OutputStream out) {
    if (clientRunning) {
      if (daemon == null) {
        daemon = new Daemon(this);
        daemon.start();
      }
      pendingCreates.put(src, out);
    }
  }

  synchronized void remove(String src) {
    pendingCreates.remove(src);
  }

  void interruptAndJoin() throws InterruptedException {
    Daemon daemonCopy = null;
    synchronized (this) {
      if (daemon != null) {
        daemon.interrupt();
        daemonCopy = daemon;
      }
    }
    // Join outside the lock so the exiting thread can re-enter
    // synchronized methods without deadlocking against us.
    if (daemonCopy != null) {
      LOG.debug("Wait for lease checker to terminate");
      daemonCopy.join();
    }
  }

  void close() {
    while (true) {
      String src;
      OutputStream out;
      synchronized (this) {
        if (pendingCreates.isEmpty()) {
          return;
        }
        src = pendingCreates.firstKey();
        out = pendingCreates.remove(src);
      }
      // Close outside the lock; close() may block on network I/O.
      if (out != null) {
        try {
          out.close();
        } catch (IOException ie) {
          LOG.error("Exception closing file " + src+ " : " + ie, ie);
        }
      }
    }
  }

  /**
   * Abort all open files. Release resources held. Ignore all errors.
   */
  synchronized void abort() {
    clientRunning = false;
    while (!pendingCreates.isEmpty()) {
      String src = pendingCreates.firstKey();
      DFSOutputStream out = (DFSOutputStream)pendingCreates.remove(src);
      if (out != null) {
        try {
          out.abort();
        } catch (IOException ie) {
          LOG.error("Exception aborting file " + src+ ": ", ie);
        }
      }
    }
    RPC.stopProxy(rpcNamenode); // close connections to the namenode
  }

  private void renew() throws IOException {
    // Only hold the lock for the emptiness check; the RPC itself is
    // made without the lock.
    synchronized(this) {
      if (pendingCreates.isEmpty()) {
        return;
      }
    }
    namenode.renewLease(clientName);
  }

  /**
   * Periodically check in with the namenode and renew all the leases
   * when the lease period is half over.
   */
  public void run() {
    long lastRenewed = 0;
    int renewal = (int)(LEASE_SOFTLIMIT_PERIOD / 2);
    if (hdfsTimeout > 0) {
      renewal = Math.min(renewal, hdfsTimeout/2);
    }
    while (clientRunning && !Thread.interrupted()) {
      if (System.currentTimeMillis() - lastRenewed > renewal) {
        try {
          renew();
          lastRenewed = System.currentTimeMillis();
        } catch (SocketTimeoutException ie) {
          // Namenode unreachable: give up and abort all open files.
          LOG.warn("Problem renewing lease for " + clientName
              + " for a period of " + (hdfsTimeout/1000)
              + " seconds. Shutting down HDFS client...", ie);
          abort();
          break;
        } catch (IOException ie) {
          // Transient failure: keep the loop running and retry.
          LOG.warn("Problem renewing lease for " + clientName
              + " for a period of " + (hdfsTimeout/1000)
              + " seconds. Will retry shortly...", ie);
        }
      }
      try {
        Thread.sleep(1000);
      } catch (InterruptedException ie) {
        if (LOG.isDebugEnabled()) {
          LOG.debug(this + " is interrupted.", ie);
        }
        return;
      }
    }
  }

  /** {@inheritDoc} */
  public String toString() {
    String s = getClass().getSimpleName();
    if (LOG.isTraceEnabled()) {
      return s + "@" + DFSClient.this + ": "
          + StringUtils.stringifyException(new Throwable("for testing"));
    }
    return s;
  }
}

/**
 * The Hdfs implementation of {@link FSDataInputStream}
 */
@InterfaceAudience.Private
public static class DFSDataInputStream extends FSDataInputStream {
  public DFSDataInputStream(DFSInputStream in) throws IOException {
    super(in);
  }

  /**
   * Returns the datanode from which the stream is currently reading.
   */
  public DatanodeInfo getCurrentDatanode() {
    return ((DFSInputStream)in).getCurrentDatanode();
  }

  /**
   * Returns the block containing the target position.
   */
  public Block getCurrentBlock() {
    return ((DFSInputStream)in).getCurrentBlock();
  }

  /**
   * Return collection of blocks that has already been located.
   */
  synchronized List<LocatedBlock> getAllBlocks() throws IOException {
    return ((DFSInputStream)in).getAllBlocks();
  }

  /**
   * @return The visible length of the file.
   */
  public long getVisibleLength() throws IOException {
    return ((DFSInputStream)in).getFileLength();
  }
}

/** Report a checksum failure observed on a single block replica. */
void reportChecksumFailure(String file, Block blk, DatanodeInfo dn) {
  DatanodeInfo [] dnArr = { dn };
  LocatedBlock [] lblocks = { new LocatedBlock(blk, dnArr) };
  reportChecksumFailure(file, lblocks);
}

// just reports checksum failure and ignores any exception during the report.
void reportChecksumFailure(String file, LocatedBlock lblocks[]) {
  try {
    reportBadBlocks(lblocks);
  } catch (IOException ie) {
    LOG.info("Found corruption while reading " + file
        + ". Error repairing corrupt blocks. Bad blocks remain. "
        + StringUtils.stringifyException(ie));
  }
}

/** {@inheritDoc} */
public String toString() {
  return getClass().getSimpleName() + "[clientName=" + clientName
      + ", ugi=" + ugi + "]";
}
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.index.mapper; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.util.Map; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class MultiFieldsIntegrationIT extends ESIntegTestCase { @SuppressWarnings("unchecked") public void testMultiFields() throws Exception { assertAcked( client().admin().indices().prepareCreate("my-index") .setMapping(createTypeSource()) ); GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, 
not(nullValue())); Map<String, Object> mappingSource = mappingMetadata.sourceAsMap(); Map<String, Object> titleFields = ((Map<String, Object>) XContentMapValues.extractValue("properties.title.fields", mappingSource)); assertThat(titleFields.size(), equalTo(1)); assertThat(titleFields.get("not_analyzed"), notNullValue()); assertThat(((Map<String, Object>) titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword")); client().prepareIndex("my-index").setId("1") .setSource("title", "Multi fields") .setRefreshPolicy(IMMEDIATE) .get(); SearchResponse searchResponse = client().prepareSearch("my-index") .setQuery(matchQuery("title", "multi")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); searchResponse = client().prepareSearch("my-index") .setQuery(matchQuery("title.not_analyzed", "Multi fields")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertAcked( client().admin().indices().preparePutMapping("my-index") .setSource(createPutMappingSource()) ); getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get(); mappingMetadata = getMappingsResponse.mappings().get("my-index"); assertThat(mappingMetadata, not(nullValue())); mappingSource = mappingMetadata.sourceAsMap(); assertThat(((Map<String, Object>) XContentMapValues.extractValue("properties.title", mappingSource)).size(), equalTo(2)); titleFields = ((Map<String, Object>) XContentMapValues.extractValue("properties.title.fields", mappingSource)); assertThat(titleFields.size(), equalTo(2)); assertThat(titleFields.get("not_analyzed"), notNullValue()); assertThat(((Map<String, Object>) titleFields.get("not_analyzed")).get("type").toString(), equalTo("keyword")); assertThat(titleFields.get("uncased"), notNullValue()); assertThat(((Map<String, Object>) titleFields.get("uncased")).get("analyzer").toString(), equalTo("whitespace")); client().prepareIndex("my-index").setId("1") .setSource("title", "Multi fields") 
.setRefreshPolicy(IMMEDIATE)
    .get();

// After the mapping update, the new whitespace-analyzed "uncased"
// sub-field must be searchable for the re-indexed document.
searchResponse = client().prepareSearch("my-index")
    .setQuery(matchQuery("title.uncased", "Multi"))
    .get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
}

/**
 * Checks that a {@code geo_point} field can declare a keyword multi-field:
 * the mapping round-trips, geo-distance queries hit the root field, and a
 * match query on {@code a.b} finds the geohash-indexed sub-field.
 */
@SuppressWarnings("unchecked")
public void testGeoPointMultiField() throws Exception {
    assertAcked(
        client().admin().indices().prepareCreate("my-index")
            .setMapping(createMappingSource("geo_point"))
    );

    // Fetch the mapping back and verify the root field plus its sub-field.
    GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get();
    MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index");
    assertThat(mappingMetadata, not(nullValue()));
    Map<String, Object> mappingSource = mappingMetadata.sourceAsMap();
    Map<String, Object> aField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a", mappingSource));
    logger.info("Keys: {}", aField.keySet());
    assertThat(aField.size(), equalTo(2)); // "type" + "fields"
    assertThat(aField.get("type").toString(), equalTo("geo_point"));
    assertThat(aField.get("fields"), notNullValue());

    Map<String, Object> bField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
    assertThat(bField.size(), equalTo(1));
    assertThat(bField.get("type").toString(), equalTo("keyword"));

    GeoPoint point = new GeoPoint(51, 19);
    client().prepareIndex("my-index").setId("1").setSource("a", point.toString()).setRefreshPolicy(IMMEDIATE).get();

    // Geo query against the root geo_point field.
    SearchResponse countResponse = client().prepareSearch("my-index").setSize(0)
        .setQuery(constantScoreQuery(geoDistanceQuery("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS)))
        .get();
    assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L));

    // Term-style query against the keyword sub-field (geohash form).
    countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.geohash())).get();
    assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L));
}

/**
 * Checks that a {@code completion} field can declare a keyword multi-field
 * and that the sub-field is searchable with a plain match query.
 */
@SuppressWarnings("unchecked")
public void testCompletionMultiField() throws Exception {
    assertAcked(
        client().admin().indices().prepareCreate("my-index")
            .setMapping(createMappingSource("completion"))
    );

    GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get();
    MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index");
    assertThat(mappingMetadata, not(nullValue()));
    Map<String, Object> mappingSource = mappingMetadata.sourceAsMap();
    Map<String, Object> aField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a", mappingSource));
    // Completion fields expose additional mapping attributes beyond "type"/"fields".
    assertThat(aField.size(), equalTo(6));
    assertThat(aField.get("type").toString(), equalTo("completion"));
    assertThat(aField.get("fields"), notNullValue());

    Map<String, Object> bField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
    assertThat(bField.size(), equalTo(1));
    assertThat(bField.get("type").toString(), equalTo("keyword"));

    client().prepareIndex("my-index").setId("1").setSource("a", "complete me").setRefreshPolicy(IMMEDIATE).get();
    SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "complete me")).get();
    assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L));
}

/**
 * Checks that an {@code ip} field can declare a keyword multi-field and that
 * the sub-field is searchable with a plain match query.
 */
@SuppressWarnings("unchecked")
public void testIpMultiField() throws Exception {
    assertAcked(
        client().admin().indices().prepareCreate("my-index")
            .setMapping(createMappingSource("ip"))
    );

    GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("my-index").get();
    MappingMetadata mappingMetadata = getMappingsResponse.mappings().get("my-index");
    assertThat(mappingMetadata, not(nullValue()));
    Map<String, Object> mappingSource = mappingMetadata.sourceAsMap();
    Map<String, Object> aField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a", mappingSource));
    assertThat(aField.size(), equalTo(2)); // "type" + "fields"
    assertThat(aField.get("type").toString(), equalTo("ip"));
    assertThat(aField.get("fields"), notNullValue());

    Map<String, Object> bField = ((Map<String, Object>) XContentMapValues.extractValue("properties.a.fields.b", mappingSource));
    assertThat(bField.size(), equalTo(1));
    assertThat(bField.get("type").toString(), equalTo("keyword"));

    client().prepareIndex("my-index").setId("1").setSource("a", "127.0.0.1").setRefreshPolicy(IMMEDIATE).get();
    SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "127.0.0.1")).get();
    assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L));
}

/**
 * Builds a mapping for one field {@code a} of the given type carrying a
 * keyword multi-field {@code b}.
 *
 * @param fieldType mapping type to assign to field {@code a}
 */
private XContentBuilder createMappingSource(String fieldType) throws IOException {
    return XContentFactory.jsonBuilder().startObject().startObject("_doc")
        .startObject("properties")
            .startObject("a")
                .field("type", fieldType)
                .startObject("fields")
                    .startObject("b")
                        .field("type", "keyword")
                    .endObject()
                .endObject()
            .endObject()
        .endObject()
        .endObject().endObject();
}

/** Initial mapping: text field {@code title} with a keyword sub-field {@code not_analyzed}. */
private XContentBuilder createTypeSource() throws IOException {
    return XContentFactory.jsonBuilder().startObject().startObject("_doc")
        .startObject("properties")
            .startObject("title")
                .field("type", "text")
                .startObject("fields")
                    .startObject("not_analyzed")
                        .field("type", "keyword")
                    .endObject()
                .endObject()
            .endObject()
        .endObject()
        .endObject().endObject();
}

/** Mapping update: adds a whitespace-analyzed {@code uncased} sub-field to {@code title}. */
private XContentBuilder createPutMappingSource() throws IOException {
    return XContentFactory.jsonBuilder().startObject()
        .startObject("properties")
            .startObject("title")
                .field("type", "text")
                .startObject("fields")
                    .startObject("uncased")
                        .field("type", "text")
                        .field("analyzer", "whitespace")
                    .endObject()
                .endObject()
            .endObject()
        .endObject()
        .endObject();
}
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.smoketest; import io.netty.util.ThreadDeathWatcher; import io.netty.util.concurrent.GlobalEventExecutor; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.indices.GetIndexRequest; import org.elasticsearch.client.indices.GetIndexTemplatesRequest; import org.elasticsearch.client.indices.GetIndexTemplatesResponse; import org.elasticsearch.client.xpack.XPackUsageRequest; import org.elasticsearch.client.xpack.XPackUsageResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.PathUtils; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ObjectPath; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; 
import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.rules.ExternalResource; import java.io.IOException; import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; /** * This test checks that a Monitoring's HTTP exporter correctly exports to a monitoring cluster * protected by security with HTTPS/SSL. * * It sets up a cluster with Monitoring and Security configured with SSL. Once started, * an HTTP exporter is activated and it exports data locally over HTTPS/SSL. The test * then uses a rest client to check that the data have been correctly received and * indexed in the cluster. */ @SuppressWarnings("removal") public class SmokeTestMonitoringWithSecurityIT extends ESRestTestCase { public class TestRestHighLevelClient extends RestHighLevelClient { TestRestHighLevelClient() { super(client(), RestClient::close, Collections.emptyList()); } } /** * A JUnit class level rule that runs after the AfterClass method in {@link ESIntegTestCase}, * which stops the cluster. 
After the cluster is stopped, there are a few netty threads that * can linger, so we wait for them to finish otherwise these lingering threads can intermittently * trigger the thread leak detector */ @ClassRule public static final ExternalResource STOP_NETTY_RESOURCE = new ExternalResource() { @Override protected void after() { try { GlobalEventExecutor.INSTANCE.awaitInactivity(5, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } catch (IllegalStateException e) { if (e.getMessage().equals("thread was not started") == false) { throw e; } // ignore since the thread was never started } try { ThreadDeathWatcher.awaitInactivity(5, TimeUnit.SECONDS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } }; private static final String USER = "test_user"; private static final SecureString PASS = new SecureString("x-pack-test-password".toCharArray()); private static final String KEYSTORE_PASS = "testnode"; private static final String MONITORING_PATTERN = ".monitoring-*"; static Path keyStore; @BeforeClass public static void getKeyStore() { try { keyStore = PathUtils.get(SmokeTestMonitoringWithSecurityIT.class.getResource("/testnode.jks").toURI()); } catch (URISyntaxException e) { throw new ElasticsearchException("exception while reading the store", e); } if (Files.exists(keyStore) == false) { throw new IllegalStateException("Keystore file [" + keyStore + "] does not exist."); } } @AfterClass public static void clearKeyStore() { keyStore = null; } RestHighLevelClient newHighLevelClient() { return new TestRestHighLevelClient(); } @Override protected String getProtocol() { return "https"; } @Override protected Settings restClientSettings() { String token = basicAuthHeaderValue(USER, PASS); return Settings.builder() .put(ThreadContext.PREFIX + ".Authorization", token) .put(ESRestTestCase.TRUSTSTORE_PATH, keyStore) .put(ESRestTestCase.TRUSTSTORE_PASSWORD, KEYSTORE_PASS) .build(); } @Before public void 
enableExporter() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.monitoring.exporters._http.auth.secure_password", "x-pack-test-password"); Settings exporterSettings = Settings.builder() .put("xpack.monitoring.collection.enabled", true) .put("xpack.monitoring.exporters._http.enabled", true) .put("xpack.monitoring.exporters._http.type", "http") .put("xpack.monitoring.exporters._http.host", "https://" + randomNodeHttpAddress()) .put("xpack.monitoring.exporters._http.auth.username", "monitoring_agent") .put("xpack.monitoring.exporters._http.ssl.verification_mode", "full") .put("xpack.monitoring.exporters._http.ssl.certificate_authorities", "testnode.crt") .setSecureSettings(secureSettings) .build(); ClusterUpdateSettingsResponse response = newHighLevelClient().cluster() .putSettings(new ClusterUpdateSettingsRequest().transientSettings(exporterSettings), RequestOptions.DEFAULT); assertTrue(response.isAcknowledged()); } @After public void disableExporter() throws IOException { Settings exporterSettings = Settings.builder() .putNull("xpack.monitoring.collection.enabled") .putNull("xpack.monitoring.exporters._http.enabled") .putNull("xpack.monitoring.exporters._http.type") .putNull("xpack.monitoring.exporters._http.host") .putNull("xpack.monitoring.exporters._http.auth.username") .putNull("xpack.monitoring.exporters._http.ssl.verification_mode") .putNull("xpack.monitoring.exporters._http.ssl.certificate_authorities") .build(); ClusterUpdateSettingsResponse response = newHighLevelClient().cluster() .putSettings(new ClusterUpdateSettingsRequest().transientSettings(exporterSettings), RequestOptions.DEFAULT); assertTrue(response.isAcknowledged()); } private boolean getMonitoringUsageExportersDefined() throws Exception { RestHighLevelClient client = newHighLevelClient(); final XPackUsageResponse usageResponse = client.xpack().usage(new XPackUsageRequest(), RequestOptions.DEFAULT); Map<String, Object> 
monitoringUsage = usageResponse.getUsages().get("monitoring");
    assertThat("Monitoring feature set does not exist", monitoringUsage, notNullValue());
    @SuppressWarnings("unchecked")
    Map<String, Object> exporters = (Map<String, Object>) monitoringUsage.get("enabled_exporters");
    return exporters != null && exporters.isEmpty() == false;
}

/**
 * End-to-end check of the HTTPS exporter: monitoring templates installed,
 * monitoring indices created and healthy, and documents actually exported.
 */
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/49094")
public void testHTTPExporterWithSSL() throws Exception {
    // Ensures that the exporter is actually on
    assertBusy(() -> assertThat("[_http] exporter is not defined", getMonitoringUsageExportersDefined(), is(true)));

    RestHighLevelClient client = newHighLevelClient();

    // Checks that the monitoring index templates have been installed
    GetIndexTemplatesRequest templateRequest = new GetIndexTemplatesRequest(MONITORING_PATTERN);
    assertBusy(() -> {
        try {
            GetIndexTemplatesResponse response = client.indices().getIndexTemplate(templateRequest, RequestOptions.DEFAULT);
            assertThat(response.getIndexTemplates().size(), greaterThanOrEqualTo(2));
        } catch (Exception e) {
            fail("template not ready yet: " + e.getMessage());
        }
    });

    GetIndexRequest indexRequest = new GetIndexRequest(MONITORING_PATTERN);
    // Waits for monitoring indices to be created
    assertBusy(() -> {
        try {
            assertThat(client.indices().exists(indexRequest, RequestOptions.DEFAULT), equalTo(true));
        } catch (Exception e) {
            fail("monitoring index not created yet: " + e.getMessage());
        }
    });

    // Waits for indices to be ready
    ClusterHealthRequest healthRequest = new ClusterHealthRequest(MONITORING_PATTERN);
    healthRequest.waitForStatus(ClusterHealthStatus.YELLOW);
    healthRequest.waitForEvents(Priority.LANGUID);
    healthRequest.waitForNoRelocatingShards(true);
    healthRequest.waitForNoInitializingShards(true);
    ClusterHealthResponse response = client.cluster().health(healthRequest, RequestOptions.DEFAULT);
    assertThat(response.isTimedOut(), is(false));

    // Checks that the HTTP exporter has successfully exported some data
    SearchRequest searchRequest = new SearchRequest(new String[] { MONITORING_PATTERN }, new SearchSourceBuilder().size(0));
    assertBusy(() -> {
        try {
            assertThat(client.search(searchRequest, RequestOptions.DEFAULT).getHits().getTotalHits().value, greaterThan(0L));
        } catch (Exception e) {
            fail("monitoring date not exported yet: " + e.getMessage());
        }
    });
}

/**
 * Sensitive exporter settings ("auth", "ssl") must be filtered out of the
 * cluster-settings API response; only non-sensitive keys may remain.
 */
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/49094")
public void testSettingsFilter() throws IOException {
    final Request request = new Request("GET", "/_cluster/settings");
    final Response response = client().performRequest(request);
    final ObjectPath path = ObjectPath.createFromResponse(response);
    final Map<String, Object> settings = path.evaluate("transient.xpack.monitoring.exporters._http");
    assertThat(settings, hasKey("type"));
    assertThat(settings, not(hasKey("auth")));
    assertThat(settings, not(hasKey("ssl")));
}

/** Picks the published HTTP address of a random node in the cluster. */
@SuppressWarnings("unchecked")
private String randomNodeHttpAddress() throws IOException {
    Response response = client().performRequest(new Request("GET", "/_nodes"));
    assertOK(response);

    ObjectPath objectPath = ObjectPath.createFromResponse(response);
    Map<String, Object> nodesAsMap = objectPath.evaluate("nodes");
    List<String> httpAddresses = new ArrayList<>();
    for (Map.Entry<String, Object> entry : nodesAsMap.entrySet()) {
        Map<String, Object> nodeDetails = (Map<String, Object>) entry.getValue();
        Map<String, Object> httpInfo = (Map<String, Object>) nodeDetails.get("http");
        httpAddresses.add((String) httpInfo.get("publish_address"));
    }
    assertThat(httpAddresses.size(), greaterThan(0));
    return randomFrom(httpAddresses);
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.rel.rules; import org.apache.calcite.linq4j.Ord; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.plan.RelRule; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.Aggregate; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.rel.core.Join; import org.apache.calcite.rel.core.JoinRelType; import org.apache.calcite.rel.core.RelFactories; import org.apache.calcite.rel.logical.LogicalAggregate; import org.apache.calcite.rel.logical.LogicalJoin; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.sql.SqlAggFunction; import org.apache.calcite.sql.SqlSplittableAggFunction; import org.apache.calcite.tools.RelBuilder; import org.apache.calcite.tools.RelBuilderFactory; import org.apache.calcite.util.Bug; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.Util; import 
org.apache.calcite.util.mapping.Mapping;
import org.apache.calcite.util.mapping.Mappings;

import com.google.common.collect.ImmutableList;

import org.checkerframework.checker.nullness.qual.Nullable;
import org.immutables.value.Value;

import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

import static java.util.Objects.requireNonNull;

/**
 * Planner rule that pushes an
 * {@link org.apache.calcite.rel.core.Aggregate}
 * past a {@link org.apache.calcite.rel.core.Join}.
 *
 * @see CoreRules#AGGREGATE_JOIN_TRANSPOSE
 * @see CoreRules#AGGREGATE_JOIN_TRANSPOSE_EXTENDED
 */
@Value.Enclosing
public class AggregateJoinTransposeRule
    extends RelRule<AggregateJoinTransposeRule.Config>
    implements TransformationRule {

  /** Creates an AggregateJoinTransposeRule. */
  protected AggregateJoinTransposeRule(Config config) {
    super(config);
  }

  @Deprecated // to be removed before 2.0
  public AggregateJoinTransposeRule(Class<? extends Aggregate> aggregateClass,
      Class<? extends Join> joinClass, RelBuilderFactory relBuilderFactory,
      boolean allowFunctions) {
    this(Config.DEFAULT
        .withRelBuilderFactory(relBuilderFactory)
        .as(Config.class)
        .withOperandFor(aggregateClass, joinClass, allowFunctions));
  }

  @Deprecated // to be removed before 2.0
  public AggregateJoinTransposeRule(Class<? extends Aggregate> aggregateClass,
      RelFactories.AggregateFactory aggregateFactory,
      Class<? extends Join> joinClass,
      RelFactories.JoinFactory joinFactory) {
    this(aggregateClass, joinClass,
        RelBuilder.proto(aggregateFactory, joinFactory), false);
  }

  @Deprecated // to be removed before 2.0
  public AggregateJoinTransposeRule(Class<? extends Aggregate> aggregateClass,
      RelFactories.AggregateFactory aggregateFactory,
      Class<? extends Join> joinClass,
      RelFactories.JoinFactory joinFactory,
      boolean allowFunctions) {
    this(aggregateClass, joinClass,
        RelBuilder.proto(aggregateFactory, joinFactory), allowFunctions);
  }

  @Deprecated // to be removed before 2.0
  public AggregateJoinTransposeRule(Class<? extends Aggregate> aggregateClass,
      RelFactories.AggregateFactory aggregateFactory,
      Class<? extends Join> joinClass,
      RelFactories.JoinFactory joinFactory,
      RelFactories.ProjectFactory projectFactory) {
    this(aggregateClass, joinClass,
        RelBuilder.proto(aggregateFactory, joinFactory, projectFactory),
        false);
  }

  @Deprecated // to be removed before 2.0
  public AggregateJoinTransposeRule(Class<? extends Aggregate> aggregateClass,
      RelFactories.AggregateFactory aggregateFactory,
      Class<? extends Join> joinClass,
      RelFactories.JoinFactory joinFactory,
      RelFactories.ProjectFactory projectFactory,
      boolean allowFunctions) {
    this(aggregateClass, joinClass,
        RelBuilder.proto(aggregateFactory, joinFactory, projectFactory),
        allowFunctions);
  }

  /** Returns whether the rule can handle this aggregate: simple group type,
   * and (when functions are allowed) every call splittable, unfiltered and
   * non-distinct. */
  private static boolean isAggregateSupported(Aggregate aggregate,
      boolean allowFunctions) {
    if (!allowFunctions && !aggregate.getAggCallList().isEmpty()) {
      return false;
    }
    if (aggregate.getGroupType() != Aggregate.Group.SIMPLE) {
      return false;
    }
    // If any aggregate functions do not support splitting, bail out
    // If any aggregate call has a filter or is distinct, bail out
    for (AggregateCall aggregateCall : aggregate.getAggCallList()) {
      if (aggregateCall.getAggregation().unwrap(SqlSplittableAggFunction.class)
          == null) {
        return false;
      }
      if (aggregateCall.filterArg >= 0 || aggregateCall.isDistinct()) {
        return false;
      }
    }
    return true;
  }

  // OUTER joins are supported for group by without aggregate functions
  // FULL OUTER JOIN is not supported since it could produce wrong result
  // due to bug (CALCITE-3012)
  private static boolean isJoinSupported(final Join join,
      final Aggregate aggregate) {
    return join.getJoinType() == JoinRelType.INNER
        || aggregate.getAggCallList().isEmpty();
  }

  @Override
public void onMatch(RelOptRuleCall call) {
    final Aggregate aggregate = call.rel(0);
    final Join join = call.rel(1);
    final RexBuilder rexBuilder = aggregate.getCluster().getRexBuilder();
    final RelBuilder relBuilder = call.builder();

    if (!isJoinSupported(join, aggregate)) {
      return;
    }

    // Do the columns used by the join appear in the output of the aggregate?
    final ImmutableBitSet aggregateColumns = aggregate.getGroupSet();
    final RelMetadataQuery mq = call.getMetadataQuery();
    // Widen the group key with columns made equivalent by pulled-up predicates.
    final ImmutableBitSet keyColumns =
        keyColumns(aggregateColumns,
            mq.getPulledUpPredicates(join).pulledUpPredicates);
    final ImmutableBitSet joinColumns =
        RelOptUtil.InputFinder.bits(join.getCondition());
    final boolean allColumnsInAggregate = keyColumns.contains(joinColumns);
    final ImmutableBitSet belowAggregateColumns =
        aggregateColumns.union(joinColumns);

    // Split join condition
    final List<Integer> leftKeys = new ArrayList<>();
    final List<Integer> rightKeys = new ArrayList<>();
    final List<Boolean> filterNulls = new ArrayList<>();
    RexNode nonEquiConj =
        RelOptUtil.splitJoinCondition(join.getLeft(), join.getRight(),
            join.getCondition(), leftKeys, rightKeys, filterNulls);
    // If it contains non-equi join conditions, we bail out
    if (!nonEquiConj.isAlwaysTrue()) {
      return;
    }

    // Push each aggregate function down to each side that contains all of its
    // arguments. Note that COUNT(*), because it has no arguments, can go to
    // both sides.
    final Map<Integer, Integer> map = new HashMap<>();
    final List<Side> sides = new ArrayList<>();
    int uniqueCount = 0;
    int offset = 0;       // field offset of the current join input
    int belowOffset = 0;  // field offset in the rewritten (below) row type
    for (int s = 0; s < 2; s++) {
      final Side side = new Side();
      final RelNode joinInput = join.getInput(s);
      int fieldCount = joinInput.getRowType().getFieldCount();
      final ImmutableBitSet fieldSet =
          ImmutableBitSet.range(offset, offset + fieldCount);
      final ImmutableBitSet belowAggregateKeyNotShifted =
          belowAggregateColumns.intersect(fieldSet);
      for (Ord<Integer> c : Ord.zip(belowAggregateKeyNotShifted)) {
        map.put(c.e, belowOffset + c.i);
      }
      final Mappings.TargetMapping mapping =
          s == 0
              ? Mappings.createIdentity(fieldCount)
              : Mappings.createShiftMapping(fieldCount + offset, 0, offset,
                  fieldCount);
      final ImmutableBitSet belowAggregateKey =
          belowAggregateKeyNotShifted.shift(-offset);
      final boolean unique;
      if (!config.isAllowFunctions()) {
        assert aggregate.getAggCallList().isEmpty();
        // If there are no functions, it doesn't matter as much whether we
        // aggregate the inputs before the join, because there will not be
        // any functions experiencing a cartesian product effect.
        //
        // But finding out whether the input is already unique requires a call
        // to areColumnsUnique that currently (until [CALCITE-1048] "Make
        // metadata more robust" is fixed) places a heavy load on
        // the metadata system.
        //
        // So we choose to imagine the input is already unique, which is
        // untrue but harmless.
        //
        Util.discard(Bug.CALCITE_1048_FIXED);
        unique = true;
      } else {
        final Boolean unique0 =
            mq.areColumnsUnique(joinInput, belowAggregateKey);
        unique = unique0 != null && unique0;
      }
      if (unique) {
        // Input is already unique on the key: no below-aggregate needed;
        // project the key columns plus singleton values of each agg call.
        ++uniqueCount;
        side.aggregate = false;
        relBuilder.push(joinInput);
        final List<RexNode> projects = new ArrayList<>();
        for (Integer i : belowAggregateKey) {
          projects.add(relBuilder.field(i));
        }
        for (Ord<AggregateCall> aggCall : Ord.zip(aggregate.getAggCallList())) {
          final SqlAggFunction aggregation = aggCall.e.getAggregation();
          final SqlSplittableAggFunction splitter =
              aggregation.unwrapOrThrow(SqlSplittableAggFunction.class);
          if (!aggCall.e.getArgList().isEmpty()
              && fieldSet.contains(ImmutableBitSet.of(aggCall.e.getArgList()))) {
            final RexNode singleton =
                splitter.singleton(rexBuilder, joinInput.getRowType(),
                    aggCall.e.transform(mapping));
            if (singleton instanceof RexInputRef) {
              final int index = ((RexInputRef) singleton).getIndex();
              if (!belowAggregateKey.get(index)) {
                projects.add(singleton);
                side.split.put(aggCall.i, projects.size() - 1);
              } else {
                // Column is already projected as part of the key; reuse it.
                side.split.put(aggCall.i, index);
              }
            } else {
              projects.add(singleton);
              side.split.put(aggCall.i, projects.size() - 1);
            }
          }
        }
        relBuilder.project(projects);
        side.newInput = relBuilder.build();
      } else {
        // Input is not unique: aggregate below the join, splitting each call
        // into its per-side partial form.
        side.aggregate = true;
        List<AggregateCall> belowAggCalls = new ArrayList<>();
        final SqlSplittableAggFunction.Registry<AggregateCall> belowAggCallRegistry =
            registry(belowAggCalls);
        final int oldGroupKeyCount = aggregate.getGroupCount();
        final int newGroupKeyCount = belowAggregateKey.cardinality();
        for (Ord<AggregateCall> aggCall : Ord.zip(aggregate.getAggCallList())) {
          final SqlAggFunction aggregation = aggCall.e.getAggregation();
          final SqlSplittableAggFunction splitter =
              aggregation.unwrapOrThrow(SqlSplittableAggFunction.class);
          final AggregateCall call1;
          if (fieldSet.contains(ImmutableBitSet.of(aggCall.e.getArgList()))) {
            final AggregateCall splitCall = splitter.split(aggCall.e, mapping);
            call1 = splitCall.adaptTo(joinInput, splitCall.getArgList(),
                splitCall.filterArg, oldGroupKeyCount, newGroupKeyCount);
          } else {
            call1 = splitter.other(rexBuilder.getTypeFactory(), aggCall.e);
          }
          if (call1 != null) {
            side.split.put(aggCall.i,
                belowAggregateKey.cardinality()
                    + belowAggCallRegistry.register(call1));
          }
        }
        side.newInput = relBuilder.push(joinInput)
            .aggregate(relBuilder.groupKey(belowAggregateKey), belowAggCalls)
            .build();
      }
      offset += fieldCount;
      belowOffset += side.newInput.getRowType().getFieldCount();
      sides.add(side);
    }

    if (uniqueCount == 2) {
      // Both inputs to the join are unique. There is nothing to be gained by
      // this rule. In fact, this aggregate+join may be the result of a previous
      // invocation of this rule; if we continue we might loop forever.
      return;
    }

    // Update condition
    final Mapping mapping =
        (Mapping) Mappings.target(map::get,
            join.getRowType().getFieldCount(), belowOffset);
    final RexNode newCondition = RexUtil.apply(mapping, join.getCondition());

    // Create new join
    RelNode side0 =
        requireNonNull(sides.get(0).newInput, "sides.get(0).newInput");
    relBuilder.push(side0)
        .push(requireNonNull(sides.get(1).newInput, "sides.get(1).newInput"))
        .join(join.getJoinType(), newCondition);

    // Aggregate above to sum up the sub-totals
    final List<AggregateCall> newAggCalls = new ArrayList<>();
    final int groupCount = aggregate.getGroupCount();
    final int newLeftWidth = side0.getRowType().getFieldCount();
    final List<RexNode> projects =
        new ArrayList<>(
            rexBuilder.identityProjects(relBuilder.peek().getRowType()));
    for (Ord<AggregateCall> aggCall : Ord.zip(aggregate.getAggCallList())) {
      final SqlAggFunction aggregation = aggCall.e.getAggregation();
      final SqlSplittableAggFunction splitter =
          aggregation.unwrapOrThrow(SqlSplittableAggFunction.class);
      final Integer leftSubTotal = sides.get(0).split.get(aggCall.i);
      final Integer rightSubTotal = sides.get(1).split.get(aggCall.i);
      newAggCalls.add(
          splitter.topSplit(rexBuilder, registry(projects), groupCount,
              relBuilder.peek().getRowType(), aggCall.e,
              leftSubTotal == null ? -1 : leftSubTotal,
              rightSubTotal == null ? -1 : rightSubTotal + newLeftWidth));
    }

    relBuilder.project(projects);

    boolean aggConvertedToProjects = false;
    if (allColumnsInAggregate && join.getJoinType() != JoinRelType.FULL) {
      // let's see if we can convert aggregate into projects
      // This shouldn't be done for FULL OUTER JOIN, aggregate on top is always required
      List<RexNode> projects2 = new ArrayList<>();
      for (int key : Mappings.apply(mapping, aggregate.getGroupSet())) {
        projects2.add(relBuilder.field(key));
      }
      for (AggregateCall newAggCall : newAggCalls) {
        newAggCall.getAggregation().maybeUnwrap(SqlSplittableAggFunction.class)
            .ifPresent(splitter -> {
              final RelDataType rowType = relBuilder.peek().getRowType();
              projects2.add(
                  splitter.singleton(rexBuilder, rowType, newAggCall));
            });
      }
      if (projects2.size()
          == aggregate.getGroupSet().cardinality() + newAggCalls.size()) {
        // We successfully converted agg calls into projects.
        relBuilder.project(projects2);
        aggConvertedToProjects = true;
      }
    }

    if (!aggConvertedToProjects) {
      relBuilder.aggregate(
          relBuilder.groupKey(Mappings.apply(mapping, aggregate.getGroupSet()),
              Mappings.apply2(mapping, aggregate.getGroupSets())),
          newAggCalls);
    }

    call.transformTo(relBuilder.build());
  }

  /** Computes the closure of a set of columns according to a given list of
   * constraints. Each 'x = y' constraint causes bit y to be set if bit x is
   * set, and vice versa.
*/ private static ImmutableBitSet keyColumns(ImmutableBitSet aggregateColumns, ImmutableList<RexNode> predicates) { NavigableMap<Integer, BitSet> equivalence = new TreeMap<>(); for (RexNode predicate : predicates) { populateEquivalences(equivalence, predicate); } ImmutableBitSet keyColumns = aggregateColumns; for (Integer aggregateColumn : aggregateColumns) { final BitSet bitSet = equivalence.get(aggregateColumn); if (bitSet != null) { keyColumns = keyColumns.union(bitSet); } } return keyColumns; } private static void populateEquivalences(Map<Integer, BitSet> equivalence, RexNode predicate) { switch (predicate.getKind()) { case EQUALS: RexCall call = (RexCall) predicate; final List<RexNode> operands = call.getOperands(); if (operands.get(0) instanceof RexInputRef) { final RexInputRef ref0 = (RexInputRef) operands.get(0); if (operands.get(1) instanceof RexInputRef) { final RexInputRef ref1 = (RexInputRef) operands.get(1); populateEquivalence(equivalence, ref0.getIndex(), ref1.getIndex()); populateEquivalence(equivalence, ref1.getIndex(), ref0.getIndex()); } } break; default: break; } } private static void populateEquivalence(Map<Integer, BitSet> equivalence, int i0, int i1) { BitSet bitSet = equivalence.get(i0); if (bitSet == null) { bitSet = new BitSet(); equivalence.put(i0, bitSet); } bitSet.set(i1); } /** Creates a {@link org.apache.calcite.sql.SqlSplittableAggFunction.Registry} * that is a view of a list. */ private static <E> SqlSplittableAggFunction.Registry<E> registry( final List<E> list) { return e -> { int i = list.indexOf(e); if (i < 0) { i = list.size(); list.add(e); } return i; }; } /** Work space for an input to a join. */ private static class Side { final Map<Integer, Integer> split = new HashMap<>(); @Nullable RelNode newInput; boolean aggregate; } /** Rule configuration. 
*/ @Value.Immutable public interface Config extends RelRule.Config { Config DEFAULT = ImmutableAggregateJoinTransposeRule.Config.of() .withOperandFor(LogicalAggregate.class, LogicalJoin.class, false); /** Extended instance that can push down aggregate functions. */ Config EXTENDED = ImmutableAggregateJoinTransposeRule.Config.of() .withOperandFor(LogicalAggregate.class, LogicalJoin.class, true); @Override default AggregateJoinTransposeRule toRule() { return new AggregateJoinTransposeRule(this); } /** Whether to push down aggregate functions, default false. */ @Value.Default default boolean isAllowFunctions() { return false; } /** Sets {@link #isAllowFunctions()}. */ Config withAllowFunctions(boolean allowFunctions); /** Defines an operand tree for the given classes, and also sets * {@link #isAllowFunctions()}. */ default Config withOperandFor(Class<? extends Aggregate> aggregateClass, Class<? extends Join> joinClass, boolean allowFunctions) { return withAllowFunctions(allowFunctions) .withOperandSupplier(b0 -> b0.operand(aggregateClass) .predicate(agg -> isAggregateSupported(agg, allowFunctions)) .oneInput(b1 -> b1.operand(joinClass).anyInputs())) .as(Config.class); } } }
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: https://www.lwjgl.org/license
 */
package org.lwjgl.demo.stb;

import org.lwjgl.BufferUtils;
import org.lwjgl.glfw.*;
import org.lwjgl.openal.AL;
import org.lwjgl.openal.ALC;
import org.lwjgl.openal.ALCCapabilities;
import org.lwjgl.opengl.GL;
import org.lwjgl.opengl.GLUtil;
import org.lwjgl.stb.STBVorbisInfo;
import org.lwjgl.system.Callback;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;

import static java.lang.Math.*;
import static org.lwjgl.demo.util.IOUtil.*;
import static org.lwjgl.glfw.Callbacks.*;
import static org.lwjgl.glfw.GLFW.*;
import static org.lwjgl.openal.AL10.*;
import static org.lwjgl.openal.ALC10.*;
import static org.lwjgl.opengl.GL11.*;
import static org.lwjgl.stb.STBEasyFont.*;
import static org.lwjgl.stb.STBVorbis.*;
import static org.lwjgl.system.MemoryUtil.*;

/**
 * STB Vorbis demo: decodes an Ogg Vorbis file with stb_vorbis and streams it
 * through OpenAL while rendering a seekable progress bar with GLFW/OpenGL.
 *
 * <p>Playback will pause while handling window events. In a real application, this can be fixed by running the decoder in a different thread.</p>
 */
public final class Vorbis {

    private Vorbis() {
    }

    /**
     * Entry point. Opens the default OpenAL device, creates a context, sets up
     * a source with two streaming buffers, then runs the decode/render loop
     * until the window is closed. All native resources are released in the
     * {@code finally} block, even on playback failure.
     *
     * @param args optional: args[0] is the path of the Ogg Vorbis file to play
     */
    public static void main(String[] args) {
        String filePath;
        if ( args.length == 0 ) {
            System.out.println("Use 'ant demo -Dclass=org.lwjgl.demo.stb.Vorbis -Dargs=<path>' to load a different Ogg Vorbis file.\n");
            filePath = "demo/phero.ogg";
        } else
            filePath = args[0];

        // null device specifier = open the default playback device
        long device = alcOpenDevice((ByteBuffer)null);
        if ( device == NULL )
            throw new IllegalStateException("Failed to open the default device.");

        ALCCapabilities deviceCaps = ALC.createCapabilities(device);

        long context = alcCreateContext(device, (IntBuffer)null);
        if ( context == NULL )
            throw new IllegalStateException("Failed to create an OpenAL context.");

        alcMakeContextCurrent(context);
        AL.createCapabilities(deviceCaps);

        int source = alGenSources();

        // Two buffers are queued on the source and refilled alternately as
        // they are consumed (classic double-buffered streaming).
        IntBuffer buffers = BufferUtils.createIntBuffer(2);
        alGenBuffers(buffers);

        Decoder decoder = null;
        Renderer renderer = null;
        try {
            decoder = new Decoder(filePath);
            renderer = new Renderer(decoder, "STB Vorbis Demo");

            long window = renderer.window;

            if ( !decoder.play(source, buffers) ) {
                System.err.println("Playback failed.");
                glfwSetWindowShouldClose(window, true);
            }

            while ( !glfwWindowShouldClose(window) ) {
                // update() refills processed buffers; loop = true restarts the
                // stream from the beginning when it runs out of samples.
                if ( !renderer.paused && !decoder.update(source, true) ) {
                    System.err.println("Playback failed.");
                    glfwSetWindowShouldClose(window, true);
                }

                float progress = decoder.getProgress();
                float time = decoder.getProgressTime(progress);
                renderer.render(progress, time);
            }
        } finally {
            // Tear down in reverse order of creation.
            if ( renderer != null )
                renderer.destroy();
            if ( decoder != null )
                stb_vorbis_close(decoder.handle);
            alDeleteBuffers(buffers);
            alDeleteSources(source);
            alcDestroyContext(context);
            alcCloseDevice(device);
        }
    }

    /**
     * Wraps an stb_vorbis decoder handle and tracks playback position in
     * samples. Produces interleaved 16-bit PCM chunks for OpenAL.
     */
    private static class Decoder {

        // Size of the PCM staging buffer, in shorts (all channels interleaved).
        private static final int BUFFER_SIZE = 1024 * 4;

        final ByteBuffer vorbis;      // the whole encoded file, kept alive for the native decoder

        final long handle;            // native stb_vorbis handle
        final int channels;
        final int sampleRate;
        final int format;             // AL_FORMAT_MONO16 or AL_FORMAT_STEREO16

        final int lengthSamples;      // total stream length, in samples per channel
        final float lengthSeconds;

        final ShortBuffer pcm;        // reusable staging buffer for decoded PCM

        int samplesLeft;              // samples (per channel) not yet handed to OpenAL

        Decoder(String filePath) {
            try {
                vorbis = ioResourceToByteBuffer(filePath, 256 * 1024);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }

            IntBuffer error = BufferUtils.createIntBuffer(1);
            handle = stb_vorbis_open_memory(vorbis, error, null);
            if ( handle == NULL )
                throw new RuntimeException("Failed to open Ogg Vorbis file. Error: " + error.get(0));

            try ( STBVorbisInfo info = STBVorbisInfo.malloc() ) {
                Decoder.getInfo(handle, info);
                this.channels = info.channels();
                this.sampleRate = info.sample_rate();
            }

            this.format = getFormat(channels);

            this.lengthSamples = stb_vorbis_stream_length_in_samples(handle);
            this.lengthSeconds = stb_vorbis_stream_length_in_seconds(handle);

            this.pcm = BufferUtils.createShortBuffer(BUFFER_SIZE);

            samplesLeft = lengthSamples;
        }

        // Dumps stream/decoder statistics to stdout and fills the info struct.
        private static void getInfo(long decoder, STBVorbisInfo info) {
            System.out.println("stream length, samples: " + stb_vorbis_stream_length_in_samples(decoder));
            System.out.println("stream length, seconds: " + stb_vorbis_stream_length_in_seconds(decoder));

            System.out.println();

            stb_vorbis_get_info(decoder, info);

            System.out.println("channels = " + info.channels());
            System.out.println("sampleRate = " + info.sample_rate());
            System.out.println("maxFrameSize = " + info.max_frame_size());
            System.out.println("setupMemoryRequired = " + info.setup_memory_required());
            System.out.println("setupTempMemoryRequired() = " + info.setup_temp_memory_required());
            System.out.println("tempMemoryRequired = " + info.temp_memory_required());
        }

        // Maps a channel count to the corresponding 16-bit OpenAL format.
        private static int getFormat(int channels) {
            switch ( channels ) {
                case 1:
                    return AL_FORMAT_MONO16;
                case 2:
                    return AL_FORMAT_STEREO16;
                default:
                    throw new UnsupportedOperationException("Unsupported number of channels: " + channels);
            }
        }

        /**
         * Decodes up to BUFFER_SIZE shorts of PCM and uploads them to the
         * given OpenAL buffer.
         *
         * @return false when the stream produced no samples (end of stream)
         */
        private boolean stream(int buffer) {
            int samples = 0;

            while ( samples < BUFFER_SIZE ) {
                pcm.position(samples);
                int samplesPerChannel = stb_vorbis_get_samples_short_interleaved(handle, channels, pcm);
                if ( samplesPerChannel == 0 )
                    break;

                samples += samplesPerChannel * channels;
            }

            if ( samples == 0 )
                return false;

            pcm.position(0);
            alBufferData(buffer, format, pcm, sampleRate);
            samplesLeft -= samples / channels;

            return true;
        }

        // Fraction of the stream already handed to OpenAL, in [0, 1].
        float getProgress() {
            return 1.0f - samplesLeft / (float)(lengthSamples);
        }

        // Converts a progress fraction to an elapsed time in seconds.
        float getProgressTime(float progress) {
            return progress * lengthSeconds;
        }

        void rewind() {
            stb_vorbis_seek_start(handle);
            samplesLeft = lengthSamples;
        }

        // Skips one second forward (direction = 1) or backward (direction = -1),
        // clamped to the stream bounds.
        void skip(int direction) {
            seek(min(max(0, stb_vorbis_get_sample_offset(handle) + direction * sampleRate), lengthSamples));
        }

        // Seeks to a normalized position in [0, 1].
        void skipTo(float offset0to1) {
            seek(round(lengthSamples * offset0to1));
        }

        private void seek(int sample_number) {
            stb_vorbis_seek(handle, sample_number);
            samplesLeft = lengthSamples - sample_number;
        }

        /**
         * Pre-fills and queues all buffers on the source, then starts playback.
         *
         * @return false if the stream ended before all buffers could be filled
         */
        boolean play(int source, IntBuffer buffers) {
            for ( int i = 0; i < buffers.limit(); i++ ) {
                if ( !stream(buffers.get(i)) )
                    return false;
            }

            alSourceQueueBuffers(source, buffers);
            alSourcePlay(source);

            return true;
        }

        /**
         * Refills any buffers the source has finished playing and re-queues
         * them. When the stream ends and {@code loop} is true, rewinds and
         * continues.
         *
         * @return false when playback is over (stream exhausted and not looping)
         */
        boolean update(int source, boolean loop) {
            int processed = alGetSourcei(source, AL_BUFFERS_PROCESSED);

            for ( int i = 0; i < processed; i++ ) {
                int buffer = alSourceUnqueueBuffers(source);

                if ( !stream(buffer) ) {
                    boolean shouldExit = true;

                    if ( loop ) {
                        rewind();
                        shouldExit = !stream(buffer);
                    }

                    if ( shouldExit )
                        return false;
                }

                alSourceQueueBuffers(source, buffer);
            }

            // If both buffers drained (source starved and stopped), restart it.
            if ( processed == 2 )
                alSourcePlay(source);

            return true;
        }
    }

    /**
     * Owns the GLFW window, input callbacks and the fixed-function OpenGL
     * state used to draw the progress bar and HUD text.
     */
    private static class Renderer {

        private static final int WIDTH = 640;
        private static final int HEIGHT = 320;

        // Callback objects are kept in fields so they are not garbage-collected
        // while still registered with GLFW.
        private final GLFWErrorCallback errorCallback;
        private final GLFWFramebufferSizeCallback framebufferSizeCallback;
        private final GLFWKeyCallback keyCallback;
        private final GLFWCursorPosCallback cursorPosCallback;
        private final GLFWMouseButtonCallback mouseButtonCallback;

        private final Callback debugProc;

        private final long window;

        private final ByteBuffer charBuffer;   // vertex data produced by stb_easy_font

        private boolean paused;

        // Cursor position relative to the window center (see seek()).
        private double cursorX, cursorY;
        private boolean buttonPressed;

        Renderer(Decoder decoder, String title) {
            errorCallback = GLFWErrorCallback.createPrint().set();

            if ( !glfwInit() )
                throw new IllegalStateException("Unable to initialize GLFW");

            glfwDefaultWindowHints();
            glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE);
            glfwWindowHint(GLFW_RESIZABLE, GLFW_FALSE);

            window = glfwCreateWindow(WIDTH, HEIGHT, title, NULL, NULL);
            if ( window == NULL )
                throw new RuntimeException("Failed to create the GLFW window");

            framebufferSizeCallback = new GLFWFramebufferSizeCallback() {
                @Override
                public void invoke(long window, int width, int height) {
                    glViewport(0, 0, width, height);
                }
            }.set(window);

            // Keyboard: ESC quits, HOME rewinds, LEFT/RIGHT seek +-1s, SPACE pauses.
            keyCallback = new GLFWKeyCallback() {
                @Override
                public void invoke(long window, int key, int scancode, int action, int mods) {
                    if ( action == GLFW_RELEASE )
                        return;

                    switch ( key ) {
                        case GLFW_KEY_ESCAPE:
                            glfwSetWindowShouldClose(window, true);
                            break;
                        case GLFW_KEY_HOME:
                            decoder.rewind();
                            break;
                        case GLFW_KEY_LEFT:
                            decoder.skip(-1);
                            break;
                        case GLFW_KEY_RIGHT:
                            decoder.skip(1);
                            break;
                        case GLFW_KEY_SPACE:
                            paused = !paused;
                            break;
                    }
                }
            }.set(window);

            // LMB press inside the progress bar seeks to the clicked position.
            mouseButtonCallback = new GLFWMouseButtonCallback() {
                @Override
                public void invoke(long window, int button, int action, int mods) {
                    if ( button != GLFW_MOUSE_BUTTON_LEFT )
                        return;

                    buttonPressed = action == GLFW_PRESS;
                    if ( !buttonPressed )
                        return;

                    seek(decoder);
                }
            }.set(window);

            // Dragging with the button held keeps seeking (scrubbing).
            cursorPosCallback = new GLFWCursorPosCallback() {
                @Override
                public void invoke(long window, double xpos, double ypos) {
                    cursorX = xpos - WIDTH * 0.5f;
                    cursorY = ypos - HEIGHT * 0.5f;

                    if ( buttonPressed )
                        seek(decoder);
                }
            }.set(window);

            // Center window
            GLFWVidMode vidmode = glfwGetVideoMode(glfwGetPrimaryMonitor());
            glfwSetWindowPos(
                window,
                (vidmode.width() - WIDTH) / 2,
                (vidmode.height() - HEIGHT) / 2
            );

            // Create context
            glfwMakeContextCurrent(window);
            GL.createCapabilities();
            debugProc = GLUtil.setupDebugMessageCallback();

            glfwSwapInterval(1);
            glfwShowWindow(window);

            // Pixel-aligned orthographic projection: origin at top-left.
            glMatrixMode(GL_PROJECTION);
            glLoadIdentity();
            glOrtho(0.0, WIDTH, HEIGHT, 0.0, -1.0, 1.0);
            glMatrixMode(GL_MODELVIEW);

            charBuffer = BufferUtils.createByteBuffer(256 * 270);

            glEnableClientState(GL_VERTEX_ARRAY);
            glVertexPointer(2, GL_FLOAT, 16, charBuffer);

            glClearColor(43f / 255f, 43f / 255f, 43f / 255f, 0f); // BG color
        }

        // Maps the cursor position (relative to window center) to a [0, 1]
        // stream offset; ignores clicks outside the 508x60 px bar interior.
        private void seek(Decoder decoder) {
            if ( cursorX < -254.0 || 254.0 < cursorX )
                return;

            if ( cursorY < -30.0 || 30.0 < cursorY )
                return;

            decoder.skipTo((float)((cursorX + 254.0) / 508.0));
        }

        /**
         * Polls events and draws one frame: progress bar, elapsed time and the
         * key-binding HUD.
         *
         * @param progress playback progress in [0, 1]
         * @param time     elapsed time in seconds
         */
        void render(float progress, float time) {
            glfwPollEvents();
            glClear(GL_COLOR_BUFFER_BIT);

            // Progress bar
            glPushMatrix();
            glTranslatef(WIDTH * 0.5f, HEIGHT * 0.5f, 0.0f);
            glBegin(GL_QUADS);
            {
                // Dark background quad (bar frame)
                glColor3f(0.5f * 43f / 255f, 0.5f * 43f / 255f, 0.5f * 43f / 255f);
                glVertex2f(-256.0f, -32.0f);
                glVertex2f(256.0f, -32.0f);
                glVertex2f(256.0f, 32.0f);
                glVertex2f(-256.0f, 32.0f);

                // Filled portion, scaled by progress
                glColor3f(0.5f, 0.5f, 0.0f);
                glVertex2f(-254.0f, -30.0f);
                glVertex2f(-254.0f + progress * 508.0f, -30.0f);
                glVertex2f(-254.0f + progress * 508.0f, 30.0f);
                glVertex2f(-254.0f, 30.0f);
            }
            glEnd();
            glPopMatrix();

            glColor3f(169f / 255f, 183f / 255f, 198f / 255f); // Text color

            // Progress text
            int minutes = (int)floor(time / 60.0f);
            int seconds = (int)floor((time - minutes * 60.0f));
            int quads = stb_easy_font_print(WIDTH * 0.5f - 13, HEIGHT * 0.5f - 4, String.format("%02d:%02d", minutes, seconds), null, charBuffer);
            glDrawArrays(GL_QUADS, 0, quads * 4);

            // HUD
            quads = stb_easy_font_print(4, 4, "Press HOME to rewind", null, charBuffer);
            glDrawArrays(GL_QUADS, 0, quads * 4);

            quads = stb_easy_font_print(4, 20, "Press LEFT/RIGHT or LMB to seek", null, charBuffer);
            glDrawArrays(GL_QUADS, 0, quads * 4);

            quads = stb_easy_font_print(4, 36, "Press SPACE to pause/resume", null, charBuffer);
            glDrawArrays(GL_QUADS, 0, quads * 4);

            glfwSwapBuffers(window);
        }

        // Frees native callbacks and shuts GLFW down.
        void destroy() {
            if ( debugProc != null )
                debugProc.free();
            glfwFreeCallbacks(window);
            glfwDestroyWindow(window);
            glfwTerminate();
            glfwSetErrorCallback(null).free();
        }
    }
}
/*
Copyright 2007-2009 Selenium committers

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
 */

package org.openqa.selenium.support.pagefactory;

import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.SearchContext;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.ui.Clock;
import org.openqa.selenium.support.ui.SlowLoadableComponent;
import org.openqa.selenium.support.ui.SystemClock;

import com.google.common.collect.Lists;

import java.lang.reflect.Field;
import java.util.List;

/**
 * An element locator that will wait for the specified number of seconds for an element to appear,
 * rather than failing instantly if it's not present. This works by polling the UI on a regular
 * basis. The element returned will be present on the DOM, but may not actually be visible: override
 * {@link #isElementUsable(WebElement)} if this is important to you.
 *
 * Because this class polls the interface on a regular basis, it is strongly recommended that users
 * avoid locating elements by XPath.
 */
public class AjaxElementLocator extends DefaultElementLocator {
  protected final int timeOutInSeconds;
  private final Clock clock;

  /**
   * Main constructor.
   *
   * @param searchContext The context to use when finding the element
   * @param field The field representing this element
   * @param timeOutInSeconds How long to wait for the element to appear. Measured in seconds.
   */
  public AjaxElementLocator(SearchContext searchContext, Field field, int timeOutInSeconds) {
    this(new SystemClock(), searchContext, field, timeOutInSeconds);
  }

  /**
   * Constructor variant that accepts an explicit {@link Clock}, primarily so
   * tests can control the passage of time.
   *
   * @param clock The clock used to measure the timeout
   * @param searchContext The context to use when finding the element
   * @param field The field representing this element
   * @param timeOutInSeconds How long to wait for the element to appear. Measured in seconds.
   */
  public AjaxElementLocator(Clock clock, SearchContext searchContext, Field field,
      int timeOutInSeconds) {
    super(searchContext, field);
    this.timeOutInSeconds = timeOutInSeconds;
    this.clock = clock;
  }

  /**
   * {@inheritDoc}
   *
   * Will poll the interface on a regular basis until the element is present.
   */
  @Override
  public WebElement findElement() {
    SlowLoadingElement loadingElement = new SlowLoadingElement(clock, timeOutInSeconds);
    try {
      return loadingElement.get().getElement();
    } catch (NoSuchElementError e) {
      // Rethrow as the standard Selenium exception, preserving the original cause.
      throw new NoSuchElementException(
          String.format("Timed out after %d seconds. %s", timeOutInSeconds, e.getMessage()),
          e.getCause());
    }
  }

  /**
   * {@inheritDoc}
   *
   * Will poll the interface on a regular basis until at least one element is present.
   */
  @Override
  public List<WebElement> findElements() {
    SlowLoadingElementList list = new SlowLoadingElementList(clock, timeOutInSeconds);
    try {
      return list.get().getElements();
    } catch (NoSuchElementError e) {
      // Unlike findElement(), timing out here yields an empty list, not an exception.
      return Lists.newArrayList();
    }
  }

  /**
   * By default, we sleep for 250ms between polls. You may override this method in order to change
   * how it sleeps.
   *
   * @return Duration to sleep in milliseconds
   */
  protected long sleepFor() {
    return 250;
  }

  /**
   * By default, elements are considered "found" if they are in the DOM. Override this method in
   * order to change whether or not you consider the element loaded. For example, perhaps you need
   * the element to be displayed:
   *
   * <pre>{@code
   *   return element.isDisplayed();
   * }</pre>
   *
   * @param element The element to use
   * @return Whether or not it meets your criteria for "found"
   */
  protected boolean isElementUsable(WebElement element) {
    return true;
  }

  /**
   * Adapts the single-element lookup to {@link SlowLoadableComponent}, which
   * supplies the retry/timeout loop around {@link #isLoaded()}.
   */
  private class SlowLoadingElement extends SlowLoadableComponent<SlowLoadingElement> {
    private NoSuchElementException lastException;
    private WebElement element;

    public SlowLoadingElement(Clock clock, int timeOutInSeconds) {
      super(clock, timeOutInSeconds);
    }

    @Override
    protected void load() {
      // Does nothing
    }

    @Override
    protected long sleepFor() {
      return AjaxElementLocator.this.sleepFor();
    }

    @Override
    protected void isLoaded() throws Error {
      try {
        // Call the superclass locator directly to avoid re-entering the polling wrapper.
        element = AjaxElementLocator.super.findElement();
        if (!isElementUsable(element)) {
          throw new NoSuchElementException("Element is not usable");
        }
      } catch (NoSuchElementException e) {
        lastException = e;
        // Should use JUnit's AssertionError, but it may not be present
        throw new NoSuchElementError("Unable to locate the element", e);
      }
    }

    public NoSuchElementException getLastException() {
      return lastException;
    }

    public WebElement getElement() {
      return element;
    }
  }

  /**
   * Adapts the multi-element lookup to {@link SlowLoadableComponent};
   * "loaded" means at least one element is present and every element passes
   * {@link #isElementUsable(WebElement)}.
   */
  private class SlowLoadingElementList extends SlowLoadableComponent<SlowLoadingElementList> {
    private NoSuchElementException lastException;
    private List<WebElement> elements;

    public SlowLoadingElementList(Clock clock, int timeOutInSeconds) {
      super(clock, timeOutInSeconds);
    }

    @Override
    protected void load() {
      // Does nothing
    }

    @Override
    protected long sleepFor() {
      return AjaxElementLocator.this.sleepFor();
    }

    @Override
    protected void isLoaded() throws Error {
      try {
        // Call the superclass locator directly to avoid re-entering the polling wrapper.
        elements = AjaxElementLocator.super.findElements();
        if (elements.size() == 0) {
          throw new NoSuchElementException("Unable to locate the element");
        }
        for (WebElement element : elements) {
          if (!isElementUsable(element)) {
            throw new NoSuchElementException("Element is not usable");
          }
        }
      } catch (NoSuchElementException e) {
        lastException = e;
        // Should use JUnit's AssertionError, but it may not be present
        throw new NoSuchElementError("Unable to locate the element", e);
      }
    }

    public NoSuchElementException getLastException() {
      return lastException;
    }

    public List<WebElement> getElements() {
      return elements;
    }
  }

  /**
   * Error (not Exception) so it propagates out of {@code isLoaded()} as the
   * SlowLoadableComponent contract requires, while carrying the real cause.
   */
  private static class NoSuchElementError extends Error {
    private NoSuchElementError(String message, Throwable throwable) {
      super(message, throwable);
    }
  }
}
package edu.washington.escience.myria.operator;

import java.util.Arrays;
import java.util.List;
import java.util.Objects;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.gs.collections.api.block.procedure.primitive.IntProcedure;
import com.gs.collections.impl.list.mutable.primitive.IntArrayList;
import com.gs.collections.impl.map.mutable.primitive.IntObjectHashMap;

import edu.washington.escience.myria.DbException;
import edu.washington.escience.myria.Schema;
import edu.washington.escience.myria.Type;
import edu.washington.escience.myria.column.Column;
import edu.washington.escience.myria.storage.MutableTupleBuffer;
import edu.washington.escience.myria.storage.TupleBatch;
import edu.washington.escience.myria.storage.TupleBatchBuffer;
import edu.washington.escience.myria.storage.TupleUtils;
import edu.washington.escience.myria.util.HashUtils;

/**
 * This is an implementation of hash equal join. The same as in DupElim, this implementation does not keep the
 * references to the incoming TupleBatches in order to get better memory performance.
 *
 * Instead of materializing join output tuples, this operator only COUNTS them: each side's hash
 * table stores distinct join keys together with their occurrence counts, and probing adds the
 * matching count to a running total. The single output tuple is that total.
 */
public final class SymmetricHashCountingJoin extends BinaryOperator {
  /** Required for Java serialization. */
  private static final long serialVersionUID = 1L;

  /** The column indices for comparing of left child. */
  private final int[] leftCompareIndx;
  /** The column indices for comparing of right child. */
  private final int[] rightCompareIndx;

  /** A hash table for tuples from left child. {Hashcode -> List of tuple indices with the same hash code} */
  private transient IntObjectHashMap<IntArrayList> leftHashTableIndices;
  /** A hash table for tuples from right child. {Hashcode -> List of tuple indices with the same hash code} */
  private transient IntObjectHashMap<IntArrayList> rightHashTableIndices;

  /** The buffer holding the valid tuples from left. */
  private transient MutableTupleBuffer leftHashTable;
  /** The buffer holding the valid tuples from right. */
  private transient MutableTupleBuffer rightHashTable;
  /** How many times each key occurred from left. */
  private transient IntArrayList occuredTimesOnLeft;
  /** How many times each key occurred from right. */
  private transient IntArrayList occuredTimesOnRight;
  /** The number of join output tuples so far. */
  private long ans;
  /** The buffer for storing and returning answer. */
  private transient TupleBatchBuffer ansTBB;
  /** The name of the single column output from this operator. */
  private final String columnName;

  /**
   * Traverse through the list of tuples.
   */
  private transient CountingJoinProcedure doCountingJoin;

  /**
   * Whether this operator has returned answer or not.
   */
  private boolean hasReturnedAnswer = false;

  /**
   * Traverse through the list of tuples with the same hash code.
   *
   * A single mutable instance is reused for every probe: processChildTB() sets the fields below
   * before invoking it on each index in a hash bucket.
   */
  private final class CountingJoinProcedure implements IntProcedure {

    /** serial version id. */
    private static final long serialVersionUID = 1L;

    /**
     * Hash table.
     */
    private MutableTupleBuffer joinAgainstHashTable;

    /**
     * times of occurrence of a key.
     */
    private IntArrayList occuredTimesOnJoinAgainstChild;
    /**
     * Join columns in the input.
     */
    private int[] inputCmpColumns;

    /**
     * Join columns in the other table.
     */
    private int[] otherCmpColumns;

    /**
     * row index of the tuple.
     */
    private int row;

    /**
     * input TupleBatch.
     */
    private TupleBatch inputTB;

    @Override
    public void value(final int index) {
      // On a true key match (not just a hash-code match), every occurrence on the other side
      // pairs with this input tuple, so add that side's count.
      if (TupleUtils.tupleEquals(inputTB, inputCmpColumns, row, joinAgainstHashTable, otherCmpColumns, index)) {
        ans += occuredTimesOnJoinAgainstChild.get(index);
      }
    }
  };

  /**
   * Construct a {@link SymmetricHashCountingJoin}.
   *
   * @param left the left child.
   * @param right the right child.
   * @param compareIndx1 the columns of the left child to be compared with the right. Order matters.
   * @param compareIndx2 the columns of the right child to be compared with the left. Order matters.
   * @throws IllegalArgumentException if there are duplicated column names from the children.
   */
  public SymmetricHashCountingJoin(final Operator left, final Operator right, final int[] compareIndx1,
      final int[] compareIndx2) {
    this("count", left, right, compareIndx1, compareIndx2);
  }

  /**
   * Construct a {@link SymmetricHashCountingJoin} operator with schema specified.
   *
   * @param outputColumnName the name of the column of the output table.
   * @param left the left child.
   * @param right the right child.
   * @param compareIndx1 the columns of the left child to be compared with the right. Order matters.
   * @param compareIndx2 the columns of the right child to be compared with the left. Order matters.
   * @throws IllegalArgumentException if there are duplicated column names in <tt>outputSchema</tt>, or if
   *           <tt>outputSchema</tt> does not have the correct number of columns and column types.
   */
  public SymmetricHashCountingJoin(final String outputColumnName, final Operator left, final Operator right,
      final int[] compareIndx1, final int[] compareIndx2) {
    super(left, right);
    leftCompareIndx = compareIndx1;
    rightCompareIndx = compareIndx2;
    columnName = Objects.requireNonNull(outputColumnName);
  }

  /**
   * consume EOI from Child 1. reset the child's EOI to false 2. record the EOI in childrenEOI[]
   *
   * @param fromLeft true if consuming eoi from left child, false if consuming eoi from right child
   */
  private void consumeChildEOI(final boolean fromLeft) {
    final Operator left = getLeft();
    final Operator right = getRight();
    if (fromLeft) {
      Preconditions.checkArgument(left.eoi());
      left.setEOI(false);
      childrenEOI[0] = true;
    } else {
      Preconditions.checkArgument(right.eoi());
      right.setEOI(false);
      childrenEOI[1] = true;
    }
  }

  /**
   * Note: If this operator is ready for EOS, this function will return true since EOS is a special EOI.
   *
   * @return whether this operator is ready to set itself EOI
   */
  private boolean isEOIReady() {
    if ((childrenEOI[0] || getLeft().eos()) && (childrenEOI[1] || getRight().eos())) {
      return true;
    }
    return false;
  }

  @Override
  protected void cleanup() throws DbException {
    // Release all transient state; the operator can be re-init()ed afterwards.
    leftHashTable = null;
    rightHashTable = null;
    occuredTimesOnLeft = null;
    occuredTimesOnRight = null;
    leftHashTableIndices = null;
    rightHashTableIndices = null;
    ansTBB = null;
    ans = 0;
  }

  @Override
  public void checkEOSAndEOI() {
    final Operator left = getLeft();
    final Operator right = getRight();

    // EOS only after the single answer tuple has actually been emitted.
    if (left.eos() && right.eos() && hasReturnedAnswer) {
      setEOS();
      return;
    }

    // at the time of eos, this operator will not return any data, so it can be safely set EOI to true
    if ((childrenEOI[0] || left.eos()) && (childrenEOI[1] || right.eos()) && hasReturnedAnswer) {
      setEOI(true);
      Arrays.fill(childrenEOI, false);
    }
  }

  /**
   * Recording the EOI status of the children.
   */
  private final boolean[] childrenEOI = new boolean[2];

  @Override
  protected TupleBatch fetchNextReady() throws DbException {
    /**
     * There is no distinction between synchronous EOI and asynchronous EOI
     */
    final Operator left = getLeft();
    final Operator right = getRight();

    // Alternate draining both children until neither has data ready (or both are EOS).
    int numOfChildNoData = 0;
    while (numOfChildNoData < 2 && (!left.eos() || !right.eos())) {
      /*
       * If one of the children is already EOS, we need to set numOfChildNoData to 1 since "numOfChildNoData++" for this
       * child will not be called.
       */
      if (left.eos() || right.eos()) {
        numOfChildNoData = 1;
      } else {
        numOfChildNoData = 0;
      }

      /* process tuple from left child */
      if (!left.eos()) {
        TupleBatch leftTB = left.nextReady();
        if (leftTB != null) { // process the data that is pulled from left child
          processChildTB(leftTB, true);
        } else {
          /* if left eoi, consume it, check whether it will cause EOI of this operator */
          if (left.eoi()) {
            consumeChildEOI(true);
            /*
             * If this operator is ready to emit EOI ( reminder that it might need to clear buffer), break to EOI handle
             * part
             */
            if (isEOIReady()) {
              break;
            }
          }
          numOfChildNoData++;
        }
      }

      /* process tuple from right child */
      if (!right.eos()) {
        TupleBatch rightTB = right.nextReady();
        if (rightTB != null) { // process the data that is pulled from right child
          processChildTB(rightTB, false);
        } else {
          /* if right eoi, consume it, check whether it will cause EOI of this operator */
          if (right.eoi()) {
            consumeChildEOI(false);
            /*
             * If this operator is ready to emit EOI ( reminder that it might need to clear buffer), break to EOI handle
             * part
             */
            if (isEOIReady()) {
              break;
            }
          }
          numOfChildNoData++;
        }
      }
    }

    /*
     * If the operator is ready to EOI, just set EOI since EOI will not return any data. If the operator is ready to
     * EOS, return answer first, then at the next round set EOS
     */
    if (isEOIReady()) {
      if (left.eos() && right.eos() && (!hasReturnedAnswer)) {
        hasReturnedAnswer = true;
        ansTBB.putLong(0, ans);
        return ansTBB.popAny();
      }
    }
    return null;
  }

  @Override
  public void init(final ImmutableMap<String, Object> execEnvVars) throws DbException {
    leftHashTableIndices = new IntObjectHashMap<>();
    rightHashTableIndices = new IntObjectHashMap<>();
    occuredTimesOnLeft = new IntArrayList();
    occuredTimesOnRight = new IntArrayList();
    // Hash tables store only the join-key columns, not whole tuples.
    leftHashTable = new MutableTupleBuffer(getLeft().getSchema().getSubSchema(leftCompareIndx));
    rightHashTable = new MutableTupleBuffer(getRight().getSchema().getSubSchema(rightCompareIndx));
    ans = 0;
    ansTBB = new TupleBatchBuffer(getSchema());
    doCountingJoin = new CountingJoinProcedure();
  }

  /**
   * Probe the opposite side's hash table with every tuple of the batch (accumulating counts into
   * {@link #ans}) and, unless the build side has been dropped, insert the tuple's key into this
   * side's hash table.
   *
   * @param tb the incoming TupleBatch for processing join.
   * @param fromLeft if the tb is from left.
   */
  protected void processChildTB(final TupleBatch tb, final boolean fromLeft) {
    final Operator left = getLeft();
    final Operator right = getRight();

    // Point the shared procedure object at the correct build/probe structures for this side.
    MutableTupleBuffer hashTable1Local = null;
    IntObjectHashMap<IntArrayList> hashTable1IndicesLocal = null;
    IntObjectHashMap<IntArrayList> hashTable2IndicesLocal = null;
    IntArrayList ownOccuredTimes = null;
    if (fromLeft) {
      hashTable1Local = leftHashTable;
      doCountingJoin.joinAgainstHashTable = rightHashTable;
      hashTable1IndicesLocal = leftHashTableIndices;
      hashTable2IndicesLocal = rightHashTableIndices;
      doCountingJoin.inputCmpColumns = leftCompareIndx;
      doCountingJoin.otherCmpColumns = rightCompareIndx;
      doCountingJoin.occuredTimesOnJoinAgainstChild = occuredTimesOnRight;
      ownOccuredTimes = occuredTimesOnLeft;
    } else {
      hashTable1Local = rightHashTable;
      doCountingJoin.joinAgainstHashTable = leftHashTable;
      hashTable1IndicesLocal = rightHashTableIndices;
      hashTable2IndicesLocal = leftHashTableIndices;
      doCountingJoin.inputCmpColumns = rightCompareIndx;
      doCountingJoin.otherCmpColumns = leftCompareIndx;
      doCountingJoin.occuredTimesOnJoinAgainstChild = occuredTimesOnLeft;
      ownOccuredTimes = occuredTimesOnRight;
    }
    doCountingJoin.inputTB = tb;

    if (left.eos() && !right.eos()) {
      /*
       * delete right child's hash table if the left child is EOS, since there will be no incoming tuples from right as
       * it will never be probed again.
       */
      rightHashTableIndices = null;
      rightHashTable = null;
    } else if (right.eos() && !left.eos()) {
      /*
       * delete left child's hash table if the right child is EOS, since there will be no incoming tuples from left as
       * it will never be probed again.
       */
      leftHashTableIndices = null;
      leftHashTable = null;
    }

    for (int row = 0; row < tb.numTuples(); ++row) {

      /*
       * update number of count of probing the other child's hash table.
       */
      final int cntHashCode = HashUtils.hashSubRow(tb, doCountingJoin.inputCmpColumns, row);
      IntArrayList tuplesWithHashCode = hashTable2IndicesLocal.get(cntHashCode);
      if (tuplesWithHashCode != null) {
        doCountingJoin.row = row;
        tuplesWithHashCode.forEach(doCountingJoin);
      }

      if (hashTable1Local != null) {
        // only build hash table on two sides if none of the children is EOS
        updateHashTableAndOccureTimes(tb, row, cntHashCode, hashTable1Local, hashTable1IndicesLocal,
            doCountingJoin.inputCmpColumns, ownOccuredTimes);
      }
    }
  }

  @Override
  protected Schema generateSchema() {
    final Schema leftSchema = getLeft().getSchema();
    final Schema rightSchema = getRight().getSchema();

    /* Assert that the compare index types are the same. */
    for (int i = 0; i < rightCompareIndx.length; ++i) {
      int leftIndex = leftCompareIndx[i];
      int rightIndex = rightCompareIndx[i];
      Type leftType = leftSchema.getColumnType(leftIndex);
      Type rightType = rightSchema.getColumnType(rightIndex);
      Preconditions.checkState(leftType == rightType,
          "column types do not match for join at index %s: left column type %s [%s] != right column type %s [%s]", i,
          leftIndex, leftType, rightIndex, rightType);
    }
    // Output is a single LONG column holding the join cardinality.
    return Schema.of(ImmutableList.of(Type.LONG_TYPE), ImmutableList.of(columnName));
  }

  /**
   * Insert the key of one input tuple into a side's hash table, or bump its occurrence count if the
   * key (not just the hash code) is already present.
   *
   * @param tb the source TupleBatch
   * @param row the row number of the to be processed tuple in the source TupleBatch
   * @param hashCode the hashCode of the to be processed tuple
   * @param hashTable the hash table to be updated
   * @param hashTableIndices the hash indices to be updated
   * @param compareColumns compareColumns of input tuple
   * @param occuredTimes occurrence-count array to be updated (parallel to hashTable rows)
   */
  private void updateHashTableAndOccureTimes(final TupleBatch tb, final int row, final int hashCode,
      final MutableTupleBuffer hashTable, final IntObjectHashMap<IntArrayList> hashTableIndices,
      final int[] compareColumns, final IntArrayList occuredTimes) {

    /* get the index of the tuple's hash code corresponding to */
    final int nextIndex = hashTable.numTuples();
    IntArrayList tupleIndicesList = hashTableIndices.get(hashCode);

    /* create one is there is no such a index yet (there is no tuple with the same hash code has been processed ) */
    if (tupleIndicesList == null) {
      tupleIndicesList = new IntArrayList(1);
      hashTableIndices.put(hashCode, tupleIndicesList);
    }

    Preconditions.checkArgument(hashTable.numColumns() == compareColumns.length);
    List<? extends Column<?>> inputColumns = tb.getDataColumns();

    /* find whether this tuple's comparing key has occurred before. If it is, only update occurred times */
    boolean found = false;
    for (int i = 0; i < tupleIndicesList.size(); ++i) {
      int index = tupleIndicesList.get(i);
      if (TupleUtils.tupleEquals(tb, compareColumns, row, hashTable, index)) {
        occuredTimes.set(index, occuredTimes.get(index) + 1);
        found = true;
        break;
      }
    }

    if (!found) {
      // New distinct key: append its columns to the buffer and start its count at 1.
      tupleIndicesList.add(nextIndex);
      for (int column = 0; column < hashTable.numColumns(); ++column) {
        hashTable.put(column, inputColumns.get(compareColumns[column]), row);
      }
      occuredTimes.add(1);
    }
  }
}
/*
 * Copyright 2006 Simon Raess
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.sf.beep4j.internal;

import net.sf.beep4j.ChannelHandler;
import net.sf.beep4j.CloseChannelCallback;
import net.sf.beep4j.CloseChannelRequest;
import net.sf.beep4j.Message;
import net.sf.beep4j.MessageStub;
import net.sf.beep4j.NullReplyListener;
import net.sf.beep4j.ReplyListener;

import org.jmock.Mock;
import org.jmock.MockObjectTestCase;
import org.jmock.core.Invocation;
import org.jmock.core.Stub;

/**
 * Unit tests for the channel close handshake of {@code ChannelImpl}.
 *
 * <p>Each test drives a {@code ChannelImpl} through a close scenario
 * (locally or remotely initiated, accepted or declined, with or without
 * an outstanding message exchange) and verifies the channel's lifecycle
 * state via {@code isAlive()} / {@code isShuttingDown()} / {@code isDead()}.
 * The session and the application-level {@link ChannelHandler} are jMock 1
 * mocks; the private {@code Stub} classes at the bottom simulate the peer's
 * answer by invoking the callback/request object that the channel passed in.
 */
public class ChannelImplTest extends MockObjectTestCase {

    // Profile URI used when constructing the channel under test.
    private static final String PROFILE = "http://www.example.org/profiles/echo";

    // Channel number used when constructing the channel under test;
    // expectations below match it as the first argument (eq(1)).
    private static final int CHANNEL = 1;

    // Mock of the session the channel talks to, plus its typed proxy.
    private Mock sessionMock;
    private InternalSession session;

    // Mock of the callback passed to Channel#close, plus its typed proxy.
    private Mock callbackMock;
    private CloseChannelCallback callback;

    // Mock of the application channel handler, plus its typed proxy.
    private Mock channelHandlerMock;
    private ChannelHandler channelHandler;

    /**
     * Creates fresh mocks (and their proxies) for the session, the close
     * callback, and the channel handler before every test.
     */
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        sessionMock = mock(InternalSession.class);
        session = (InternalSession) sessionMock.proxy();
        callbackMock = mock(CloseChannelCallback.class);
        callback = (CloseChannelCallback) callbackMock.proxy();
        channelHandlerMock = mock(ChannelHandler.class);
        channelHandler = (ChannelHandler) channelHandlerMock.proxy();
    }

    /** Asserts the channel is in the fully open (alive) state. */
    private void assertIsAlive(InternalChannel channel) {
        assertTrue(channel.isAlive());
        assertFalse(channel.isShuttingDown());
        assertFalse(channel.isDead());
    }

    /** Asserts the channel is closing but not yet closed (shutting down). */
    private void assertIsShuttingDown(InternalChannel channel) {
        assertFalse(channel.isAlive());
        assertTrue(channel.isShuttingDown());
        assertFalse(channel.isDead());
    }

    /** Asserts the channel is fully closed (dead). */
    private void assertIsDead(InternalChannel channel) {
        assertFalse(channel.isAlive());
        assertFalse(channel.isShuttingDown());
        assertTrue(channel.isDead());
    }

    /**
     * Locally initiated close that the peer accepts immediately: the channel
     * goes straight to the dead state, and any further sendMessage/close
     * attempts must be rejected with IllegalStateException.
     */
    public void testAcceptedCloseRequest() throws Exception {
        InternalChannel channel = new ChannelImpl(session, PROFILE, CHANNEL);
        channel.initChannel(channelHandler);

        // define expectations
        // TODO setup ordering constraints
        // The stub answers the close request synchronously by accepting it
        // (index 1 = the CloseChannelCallback argument of requestChannelClose).
        sessionMock.expects(once()).method("requestChannelClose")
                .with(eq(1), ANYTHING)
                .will(new CloseAcceptingStub(1));
        callbackMock.expects(once()).method("closeAccepted");
        channelHandlerMock.expects(once()).method("channelClosed");

        // test
        channel.close(callback);
        assertIsDead(channel);

        try {
            channel.sendMessage(new MessageStub(), new NullReplyListener());
            fail("sending messages in dead state must fail");
        } catch (IllegalStateException e) {
            // expected
        }

        try {
            channel.close(callback);
            fail("closing a dead channel must fail");
        } catch (IllegalStateException e) {
            // expected
        }

        verify();
    }

    /**
     * Locally initiated close while a message exchange is still pending:
     * the channel must stay in the shutting-down state until the reply
     * (NUL) for the outstanding message arrives, and only then become dead.
     */
    public void testDelayedAcceptedCloseRequest() throws Exception {
        InternalChannel channel = new ChannelImpl(session, PROFILE, CHANNEL);
        channel.initChannel(channelHandler);
        Message message = new MessageStub();

        // define expectations
        // TODO setup ordering constraints
        // Capture the ReplyListener the channel registers for the pending
        // message (parameter index 2 of sendMessage) so the test can
        // complete the exchange later.
        ParameterCaptureStub<ReplyListener> capture =
                new ParameterCaptureStub<ReplyListener>(2, ReplyListener.class, null);
        sessionMock.expects(once()).method("sendMessage")
                .with(eq(1), same(message), ANYTHING)
                .will(capture);
        sessionMock.expects(once()).method("requestChannelClose")
                .with(eq(1), ANYTHING)
                .will(new CloseAcceptingStub(1));
        callbackMock.expects(once()).method("closeAccepted");
        channelHandlerMock.expects(once()).method("channelClosed");

        // test
        channel.sendMessage(message, new NullReplyListener());
        channel.close(callback);
        assertIsShuttingDown(channel);

        // Deliver the reply for the outstanding exchange; this releases the
        // pending close and the channel finishes dying.
        ReplyListener listener = capture.getParameter();
        listener.receiveNUL();
        assertIsDead(channel);
    }

    /**
     * Locally initiated close that the peer declines (550 "still working"):
     * the channel must return to the alive state and continue to accept
     * outgoing messages.
     */
    public void testDeclinedCloseRequest() throws Exception {
        InternalChannel channel = new ChannelImpl(session, PROFILE, CHANNEL);
        channel.initChannel(channelHandler);
        Message message = new MessageStub();

        // define expectations
        // TODO: define ordering constraints
        sessionMock.expects(once()).method("requestChannelClose")
                .with(eq(1), ANYTHING)
                .will(new CloseDecliningStub(1, 550, "still working"));
        callbackMock.expects(once()).method("closeDeclined")
                .with(eq(550), eq("still working"));
        sessionMock.expects(once()).method("sendMessage")
                .with(eq(1), same(message), ANYTHING);

        // test
        channel.close(callback);
        assertIsAlive(channel);
        // A declined close leaves the channel usable.
        channel.sendMessage(message, new NullReplyListener());
    }

    /**
     * Locally initiated close with a pending exchange, where the peer
     * declines once the exchange completes: after the reply arrives the
     * channel must be alive again and accept a second message.
     */
    public void testDelayedDeclinedCloseRequest() throws Exception {
        InternalChannel channel = new ChannelImpl(session, PROFILE, CHANNEL);
        channel.initChannel(channelHandler);
        Message m1 = new MessageStub();
        Message m2 = new MessageStub();

        // define expectations
        // TODO: define ordering constraints
        // Capture m1's ReplyListener so the test can complete the exchange.
        ParameterCaptureStub<ReplyListener> capture =
                new ParameterCaptureStub<ReplyListener>(2, ReplyListener.class, null);
        sessionMock.expects(once()).method("sendMessage")
                .with(eq(1), same(m1), ANYTHING)
                .will(capture);
        sessionMock.expects(once()).method("requestChannelClose")
                .with(eq(1), ANYTHING)
                .will(new CloseDecliningStub(1, 550, "still working"));
        callbackMock.expects(once()).method("closeDeclined")
                .with(eq(550), eq("still working"));
        sessionMock.expects(once()).method("sendMessage")
                .with(eq(1), same(m2), ANYTHING);

        // test
        channel.sendMessage(m1, new NullReplyListener());
        channel.close(callback);
        assertIsShuttingDown(channel);

        // Completing the pending exchange triggers the (declined) close
        // request; the channel must revert to alive.
        ReplyListener listener = capture.getParameter();
        listener.receiveNUL();
        assertIsAlive(channel);
        channel.sendMessage(m2, new NullReplyListener());
    }

    /**
     * Remotely initiated close that the application accepts: the handler's
     * closeRequested is answered by accepting the request, channelClosed is
     * reported, and the channel ends up dead.
     */
    public void testCloseRequestedAccepted() throws Exception {
        InternalChannel channel = new ChannelImpl(session, PROFILE, CHANNEL);
        // initChannel returns the channel's own handler wrapper, which is
        // what the session would invoke for incoming events.
        ChannelHandler handler = channel.initChannel(channelHandler);

        // define expectations
        // TODO: define ordering constraints
        // Index 0 = the CloseChannelRequest argument of closeRequested.
        channelHandlerMock.expects(once()).method("closeRequested")
                .with(ANYTHING)
                .will(new CloseAcceptingRequest(0));
        channelHandlerMock.expects(once()).method("channelClosed");

        Mock mock = mock(CloseChannelRequest.class);
        mock.expects(once()).method("accept");
        CloseChannelRequest request = (CloseChannelRequest) mock.proxy();

        // test
        handler.closeRequested(request);
        assertIsDead(channel);
    }

    /**
     * Remotely initiated close, accepted by the application, while an
     * exchange is still pending: the channel must shut down only after the
     * outstanding reply arrives.
     */
    public void testDelayedCloseRequestedAccepted() throws Exception {
        InternalChannel channel = new ChannelImpl(session, PROFILE, CHANNEL);
        ChannelHandler handler = channel.initChannel(channelHandler);
        Message message = new MessageStub();
        ParameterCaptureStub<ReplyListener> capture =
                new ParameterCaptureStub<ReplyListener>(2, ReplyListener.class, null);

        // define expectations
        // TODO: define ordering constraints
        sessionMock.expects(once()).method("sendMessage")
                .with(eq(1), same(message), ANYTHING)
                .will(capture);
        channelHandlerMock.expects(once()).method("closeRequested")
                .with(ANYTHING)
                .will(new CloseAcceptingRequest(0));
        channelHandlerMock.expects(once()).method("channelClosed");

        Mock mock = mock(CloseChannelRequest.class);
        mock.expects(once()).method("accept");
        CloseChannelRequest request = (CloseChannelRequest) mock.proxy();

        // test
        channel.sendMessage(message, new NullReplyListener());
        handler.closeRequested(request);
        assertIsShuttingDown(channel);

        // Completing the pending exchange lets the accepted close finish.
        ReplyListener listener = capture.getParameter();
        listener.receiveNUL();
        assertIsDead(channel);
    }

    /**
     * Remotely initiated close that the application rejects: the request is
     * rejected and the channel stays alive.
     */
    public void testCloseRequestedDeclined() throws Exception {
        InternalChannel channel = new ChannelImpl(session, PROFILE, CHANNEL);
        ChannelHandler handler = channel.initChannel(channelHandler);

        // define expectations
        // TODO: define ordering constraints
        channelHandlerMock.expects(once()).method("closeRequested")
                .with(ANYTHING)
                .will(new CloseRejectingRequest(0));

        Mock mock = mock(CloseChannelRequest.class);
        mock.expects(once()).method("reject");
        CloseChannelRequest request = (CloseChannelRequest) mock.proxy();

        // test
        handler.closeRequested(request);
        assertIsAlive(channel);
    }

    /**
     * Remotely initiated close, rejected by the application, while an
     * exchange is pending: after the outstanding reply arrives the channel
     * must be alive again and accept further messages.
     */
    public void testDelayedCloseRequestedDeclined() throws Exception {
        InternalChannel channel = new ChannelImpl(session, PROFILE, CHANNEL);
        ChannelHandler handler = channel.initChannel(channelHandler);
        Message m1 = new MessageStub();
        Message m2 = new MessageStub();
        ParameterCaptureStub<ReplyListener> capture =
                new ParameterCaptureStub<ReplyListener>(2, ReplyListener.class, null);

        // define expectations
        // TODO: define ordering constraints
        sessionMock.expects(once()).method("sendMessage")
                .with(eq(1), same(m1), ANYTHING)
                .will(capture);
        channelHandlerMock.expects(once()).method("closeRequested")
                .with(ANYTHING)
                .will(new CloseRejectingRequest(0));
        sessionMock.expects(once()).method("sendMessage")
                .with(eq(1), same(m2), ANYTHING);

        Mock mock = mock(CloseChannelRequest.class);
        mock.expects(once()).method("reject");
        CloseChannelRequest request = (CloseChannelRequest) mock.proxy();

        // test
        channel.sendMessage(m1, new NullReplyListener());
        handler.closeRequested(request);
        assertIsShuttingDown(channel);

        // Completing the exchange triggers the (rejected) close request;
        // the channel must revert to alive and remain usable.
        ReplyListener listener = capture.getParameter();
        listener.receiveNUL();
        assertIsAlive(channel);
        channel.sendMessage(m2, new NullReplyListener());
    }

    /**
     * jMock stub that accepts a pending close: extracts the
     * {@link CloseChannelCallback} at the given parameter index of the
     * intercepted invocation and calls {@code closeAccepted()} on it.
     */
    private static class CloseAcceptingStub implements Stub {
        // Parameter index of the CloseChannelCallback in the mocked call.
        private final int index;

        private CloseAcceptingStub(int index) {
            this.index = index;
        }

        public StringBuffer describeTo(StringBuffer buf) {
            buf.append("stub[accept close request]");
            return buf;
        }

        public Object invoke(Invocation invocation) throws Throwable {
            CloseChannelCallback callback =
                    (CloseChannelCallback) invocation.parameterValues.get(index);
            callback.closeAccepted();
            return null;
        }
    }

    /**
     * jMock stub that accepts an incoming close request: extracts the
     * {@link CloseChannelRequest} at the given parameter index and calls
     * {@code accept()} on it.
     */
    private static class CloseAcceptingRequest implements Stub {
        // Parameter index of the CloseChannelRequest in the mocked call.
        private final int index;

        private CloseAcceptingRequest(int index) {
            this.index = index;
        }

        public StringBuffer describeTo(StringBuffer buf) {
            buf.append("stub[accept close request]");
            return buf;
        }

        public Object invoke(Invocation invocation) throws Throwable {
            CloseChannelRequest callback =
                    (CloseChannelRequest) invocation.parameterValues.get(index);
            callback.accept();
            return null;
        }
    }

    /**
     * jMock stub that declines a pending close with a fixed reply code and
     * message: extracts the {@link CloseChannelCallback} at the given
     * parameter index and calls {@code closeDeclined(code, message)} on it.
     */
    private static class CloseDecliningStub implements Stub {
        // Reply code passed to closeDeclined (e.g. 550).
        private final int code;
        // Human-readable reason passed to closeDeclined.
        private final String message;
        // Parameter index of the CloseChannelCallback in the mocked call.
        private final int index;

        private CloseDecliningStub(int index, int code, String message) {
            this.code = code;
            this.message = message;
            this.index = index;
        }

        public StringBuffer describeTo(StringBuffer buf) {
            buf.append("stub[decline close request]");
            return buf;
        }

        public Object invoke(Invocation invocation) throws Throwable {
            CloseChannelCallback callback =
                    (CloseChannelCallback) invocation.parameterValues.get(index);
            callback.closeDeclined(code, message);
            return null;
        }
    }

    /**
     * jMock stub that rejects an incoming close request: extracts the
     * {@link CloseChannelRequest} at the given parameter index and calls
     * {@code reject()} on it.
     */
    private static class CloseRejectingRequest implements Stub {
        // Parameter index of the CloseChannelRequest in the mocked call.
        private final int index;

        private CloseRejectingRequest(int index) {
            this.index = index;
        }

        public StringBuffer describeTo(StringBuffer buf) {
            buf.append("stub[decline close request]");
            return buf;
        }

        public Object invoke(Invocation invocation) throws Throwable {
            CloseChannelRequest callback =
                    (CloseChannelRequest) invocation.parameterValues.get(index);
            callback.reject();
            return null;
        }
    }
}
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.datacatalog.v1.stub; import static com.google.cloud.datacatalog.v1.DataCatalogClient.ListEntriesPagedResponse; import static com.google.cloud.datacatalog.v1.DataCatalogClient.ListEntryGroupsPagedResponse; import static com.google.cloud.datacatalog.v1.DataCatalogClient.ListTagsPagedResponse; import static com.google.cloud.datacatalog.v1.DataCatalogClient.SearchCatalogPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import 
com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.datacatalog.v1.Contacts; import com.google.cloud.datacatalog.v1.CreateEntryGroupRequest; import com.google.cloud.datacatalog.v1.CreateEntryRequest; import com.google.cloud.datacatalog.v1.CreateTagRequest; import com.google.cloud.datacatalog.v1.CreateTagTemplateFieldRequest; import com.google.cloud.datacatalog.v1.CreateTagTemplateRequest; import com.google.cloud.datacatalog.v1.DeleteEntryGroupRequest; import com.google.cloud.datacatalog.v1.DeleteEntryRequest; import com.google.cloud.datacatalog.v1.DeleteTagRequest; import com.google.cloud.datacatalog.v1.DeleteTagTemplateFieldRequest; import com.google.cloud.datacatalog.v1.DeleteTagTemplateRequest; import com.google.cloud.datacatalog.v1.Entry; import com.google.cloud.datacatalog.v1.EntryGroup; import com.google.cloud.datacatalog.v1.EntryOverview; import com.google.cloud.datacatalog.v1.GetEntryGroupRequest; import com.google.cloud.datacatalog.v1.GetEntryRequest; import com.google.cloud.datacatalog.v1.GetTagTemplateRequest; import com.google.cloud.datacatalog.v1.ListEntriesRequest; import com.google.cloud.datacatalog.v1.ListEntriesResponse; import com.google.cloud.datacatalog.v1.ListEntryGroupsRequest; import com.google.cloud.datacatalog.v1.ListEntryGroupsResponse; import com.google.cloud.datacatalog.v1.ListTagsRequest; import com.google.cloud.datacatalog.v1.ListTagsResponse; import com.google.cloud.datacatalog.v1.LookupEntryRequest; import com.google.cloud.datacatalog.v1.ModifyEntryContactsRequest; import com.google.cloud.datacatalog.v1.ModifyEntryOverviewRequest; import com.google.cloud.datacatalog.v1.RenameTagTemplateFieldEnumValueRequest; import com.google.cloud.datacatalog.v1.RenameTagTemplateFieldRequest; import com.google.cloud.datacatalog.v1.SearchCatalogRequest; import com.google.cloud.datacatalog.v1.SearchCatalogResponse; import com.google.cloud.datacatalog.v1.SearchCatalogResult; import 
com.google.cloud.datacatalog.v1.StarEntryRequest; import com.google.cloud.datacatalog.v1.StarEntryResponse; import com.google.cloud.datacatalog.v1.Tag; import com.google.cloud.datacatalog.v1.TagTemplate; import com.google.cloud.datacatalog.v1.TagTemplateField; import com.google.cloud.datacatalog.v1.UnstarEntryRequest; import com.google.cloud.datacatalog.v1.UnstarEntryResponse; import com.google.cloud.datacatalog.v1.UpdateEntryGroupRequest; import com.google.cloud.datacatalog.v1.UpdateEntryRequest; import com.google.cloud.datacatalog.v1.UpdateTagRequest; import com.google.cloud.datacatalog.v1.UpdateTagTemplateFieldRequest; import com.google.cloud.datacatalog.v1.UpdateTagTemplateRequest; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.iam.v1.GetIamPolicyRequest; import com.google.iam.v1.Policy; import com.google.iam.v1.SetIamPolicyRequest; import com.google.iam.v1.TestIamPermissionsRequest; import com.google.iam.v1.TestIamPermissionsResponse; import com.google.protobuf.Empty; import java.io.IOException; import java.util.List; import javax.annotation.Generated; import org.threeten.bp.Duration; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link DataCatalogStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (datacatalog.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. 
* * <p>For example, to set the total timeout of createEntryGroup to 30 seconds: * * <pre>{@code * DataCatalogStubSettings.Builder dataCatalogSettingsBuilder = * DataCatalogStubSettings.newBuilder(); * dataCatalogSettingsBuilder * .createEntryGroupSettings() * .setRetrySettings( * dataCatalogSettingsBuilder * .createEntryGroupSettings() * .getRetrySettings() * .toBuilder() * .setTotalTimeout(Duration.ofSeconds(30)) * .build()); * DataCatalogStubSettings dataCatalogSettings = dataCatalogSettingsBuilder.build(); * }</pre> */ @Generated("by gapic-generator-java") public class DataCatalogStubSettings extends StubSettings<DataCatalogStubSettings> { /** The default scopes of the service. */ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build(); private final PagedCallSettings< SearchCatalogRequest, SearchCatalogResponse, SearchCatalogPagedResponse> searchCatalogSettings; private final UnaryCallSettings<CreateEntryGroupRequest, EntryGroup> createEntryGroupSettings; private final UnaryCallSettings<GetEntryGroupRequest, EntryGroup> getEntryGroupSettings; private final UnaryCallSettings<UpdateEntryGroupRequest, EntryGroup> updateEntryGroupSettings; private final UnaryCallSettings<DeleteEntryGroupRequest, Empty> deleteEntryGroupSettings; private final PagedCallSettings< ListEntryGroupsRequest, ListEntryGroupsResponse, ListEntryGroupsPagedResponse> listEntryGroupsSettings; private final UnaryCallSettings<CreateEntryRequest, Entry> createEntrySettings; private final UnaryCallSettings<UpdateEntryRequest, Entry> updateEntrySettings; private final UnaryCallSettings<DeleteEntryRequest, Empty> deleteEntrySettings; private final UnaryCallSettings<GetEntryRequest, Entry> getEntrySettings; private final UnaryCallSettings<LookupEntryRequest, Entry> lookupEntrySettings; private final PagedCallSettings<ListEntriesRequest, ListEntriesResponse, ListEntriesPagedResponse> 
listEntriesSettings; private final UnaryCallSettings<ModifyEntryOverviewRequest, EntryOverview> modifyEntryOverviewSettings; private final UnaryCallSettings<ModifyEntryContactsRequest, Contacts> modifyEntryContactsSettings; private final UnaryCallSettings<CreateTagTemplateRequest, TagTemplate> createTagTemplateSettings; private final UnaryCallSettings<GetTagTemplateRequest, TagTemplate> getTagTemplateSettings; private final UnaryCallSettings<UpdateTagTemplateRequest, TagTemplate> updateTagTemplateSettings; private final UnaryCallSettings<DeleteTagTemplateRequest, Empty> deleteTagTemplateSettings; private final UnaryCallSettings<CreateTagTemplateFieldRequest, TagTemplateField> createTagTemplateFieldSettings; private final UnaryCallSettings<UpdateTagTemplateFieldRequest, TagTemplateField> updateTagTemplateFieldSettings; private final UnaryCallSettings<RenameTagTemplateFieldRequest, TagTemplateField> renameTagTemplateFieldSettings; private final UnaryCallSettings<RenameTagTemplateFieldEnumValueRequest, TagTemplateField> renameTagTemplateFieldEnumValueSettings; private final UnaryCallSettings<DeleteTagTemplateFieldRequest, Empty> deleteTagTemplateFieldSettings; private final UnaryCallSettings<CreateTagRequest, Tag> createTagSettings; private final UnaryCallSettings<UpdateTagRequest, Tag> updateTagSettings; private final UnaryCallSettings<DeleteTagRequest, Empty> deleteTagSettings; private final PagedCallSettings<ListTagsRequest, ListTagsResponse, ListTagsPagedResponse> listTagsSettings; private final UnaryCallSettings<StarEntryRequest, StarEntryResponse> starEntrySettings; private final UnaryCallSettings<UnstarEntryRequest, UnstarEntryResponse> unstarEntrySettings; private final UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings; private final UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings; private final UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings; private static final 
PagedListDescriptor< SearchCatalogRequest, SearchCatalogResponse, SearchCatalogResult> SEARCH_CATALOG_PAGE_STR_DESC = new PagedListDescriptor< SearchCatalogRequest, SearchCatalogResponse, SearchCatalogResult>() { @Override public String emptyToken() { return ""; } @Override public SearchCatalogRequest injectToken(SearchCatalogRequest payload, String token) { return SearchCatalogRequest.newBuilder(payload).setPageToken(token).build(); } @Override public SearchCatalogRequest injectPageSize(SearchCatalogRequest payload, int pageSize) { return SearchCatalogRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(SearchCatalogRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(SearchCatalogResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<SearchCatalogResult> extractResources(SearchCatalogResponse payload) { return payload.getResultsList() == null ? ImmutableList.<SearchCatalogResult>of() : payload.getResultsList(); } }; private static final PagedListDescriptor< ListEntryGroupsRequest, ListEntryGroupsResponse, EntryGroup> LIST_ENTRY_GROUPS_PAGE_STR_DESC = new PagedListDescriptor<ListEntryGroupsRequest, ListEntryGroupsResponse, EntryGroup>() { @Override public String emptyToken() { return ""; } @Override public ListEntryGroupsRequest injectToken( ListEntryGroupsRequest payload, String token) { return ListEntryGroupsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListEntryGroupsRequest injectPageSize( ListEntryGroupsRequest payload, int pageSize) { return ListEntryGroupsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListEntryGroupsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListEntryGroupsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<EntryGroup> extractResources(ListEntryGroupsResponse 
payload) { return payload.getEntryGroupsList() == null ? ImmutableList.<EntryGroup>of() : payload.getEntryGroupsList(); } }; private static final PagedListDescriptor<ListEntriesRequest, ListEntriesResponse, Entry> LIST_ENTRIES_PAGE_STR_DESC = new PagedListDescriptor<ListEntriesRequest, ListEntriesResponse, Entry>() { @Override public String emptyToken() { return ""; } @Override public ListEntriesRequest injectToken(ListEntriesRequest payload, String token) { return ListEntriesRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListEntriesRequest injectPageSize(ListEntriesRequest payload, int pageSize) { return ListEntriesRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListEntriesRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListEntriesResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Entry> extractResources(ListEntriesResponse payload) { return payload.getEntriesList() == null ? ImmutableList.<Entry>of() : payload.getEntriesList(); } }; private static final PagedListDescriptor<ListTagsRequest, ListTagsResponse, Tag> LIST_TAGS_PAGE_STR_DESC = new PagedListDescriptor<ListTagsRequest, ListTagsResponse, Tag>() { @Override public String emptyToken() { return ""; } @Override public ListTagsRequest injectToken(ListTagsRequest payload, String token) { return ListTagsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListTagsRequest injectPageSize(ListTagsRequest payload, int pageSize) { return ListTagsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListTagsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListTagsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<Tag> extractResources(ListTagsResponse payload) { return payload.getTagsList() == null ? 
ImmutableList.<Tag>of() : payload.getTagsList(); } }; private static final PagedListResponseFactory< SearchCatalogRequest, SearchCatalogResponse, SearchCatalogPagedResponse> SEARCH_CATALOG_PAGE_STR_FACT = new PagedListResponseFactory< SearchCatalogRequest, SearchCatalogResponse, SearchCatalogPagedResponse>() { @Override public ApiFuture<SearchCatalogPagedResponse> getFuturePagedResponse( UnaryCallable<SearchCatalogRequest, SearchCatalogResponse> callable, SearchCatalogRequest request, ApiCallContext context, ApiFuture<SearchCatalogResponse> futureResponse) { PageContext<SearchCatalogRequest, SearchCatalogResponse, SearchCatalogResult> pageContext = PageContext.create(callable, SEARCH_CATALOG_PAGE_STR_DESC, request, context); return SearchCatalogPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListEntryGroupsRequest, ListEntryGroupsResponse, ListEntryGroupsPagedResponse> LIST_ENTRY_GROUPS_PAGE_STR_FACT = new PagedListResponseFactory< ListEntryGroupsRequest, ListEntryGroupsResponse, ListEntryGroupsPagedResponse>() { @Override public ApiFuture<ListEntryGroupsPagedResponse> getFuturePagedResponse( UnaryCallable<ListEntryGroupsRequest, ListEntryGroupsResponse> callable, ListEntryGroupsRequest request, ApiCallContext context, ApiFuture<ListEntryGroupsResponse> futureResponse) { PageContext<ListEntryGroupsRequest, ListEntryGroupsResponse, EntryGroup> pageContext = PageContext.create(callable, LIST_ENTRY_GROUPS_PAGE_STR_DESC, request, context); return ListEntryGroupsPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListEntriesRequest, ListEntriesResponse, ListEntriesPagedResponse> LIST_ENTRIES_PAGE_STR_FACT = new PagedListResponseFactory< ListEntriesRequest, ListEntriesResponse, ListEntriesPagedResponse>() { @Override public ApiFuture<ListEntriesPagedResponse> getFuturePagedResponse( UnaryCallable<ListEntriesRequest, ListEntriesResponse> callable, 
ListEntriesRequest request, ApiCallContext context, ApiFuture<ListEntriesResponse> futureResponse) { PageContext<ListEntriesRequest, ListEntriesResponse, Entry> pageContext = PageContext.create(callable, LIST_ENTRIES_PAGE_STR_DESC, request, context); return ListEntriesPagedResponse.createAsync(pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListTagsRequest, ListTagsResponse, ListTagsPagedResponse> LIST_TAGS_PAGE_STR_FACT = new PagedListResponseFactory<ListTagsRequest, ListTagsResponse, ListTagsPagedResponse>() { @Override public ApiFuture<ListTagsPagedResponse> getFuturePagedResponse( UnaryCallable<ListTagsRequest, ListTagsResponse> callable, ListTagsRequest request, ApiCallContext context, ApiFuture<ListTagsResponse> futureResponse) { PageContext<ListTagsRequest, ListTagsResponse, Tag> pageContext = PageContext.create(callable, LIST_TAGS_PAGE_STR_DESC, request, context); return ListTagsPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to searchCatalog. */ public PagedCallSettings<SearchCatalogRequest, SearchCatalogResponse, SearchCatalogPagedResponse> searchCatalogSettings() { return searchCatalogSettings; } /** Returns the object with the settings used for calls to createEntryGroup. */ public UnaryCallSettings<CreateEntryGroupRequest, EntryGroup> createEntryGroupSettings() { return createEntryGroupSettings; } /** Returns the object with the settings used for calls to getEntryGroup. */ public UnaryCallSettings<GetEntryGroupRequest, EntryGroup> getEntryGroupSettings() { return getEntryGroupSettings; } /** Returns the object with the settings used for calls to updateEntryGroup. */ public UnaryCallSettings<UpdateEntryGroupRequest, EntryGroup> updateEntryGroupSettings() { return updateEntryGroupSettings; } /** Returns the object with the settings used for calls to deleteEntryGroup. 
*/ public UnaryCallSettings<DeleteEntryGroupRequest, Empty> deleteEntryGroupSettings() { return deleteEntryGroupSettings; } /** Returns the object with the settings used for calls to listEntryGroups. */ public PagedCallSettings< ListEntryGroupsRequest, ListEntryGroupsResponse, ListEntryGroupsPagedResponse> listEntryGroupsSettings() { return listEntryGroupsSettings; } /** Returns the object with the settings used for calls to createEntry. */ public UnaryCallSettings<CreateEntryRequest, Entry> createEntrySettings() { return createEntrySettings; } /** Returns the object with the settings used for calls to updateEntry. */ public UnaryCallSettings<UpdateEntryRequest, Entry> updateEntrySettings() { return updateEntrySettings; } /** Returns the object with the settings used for calls to deleteEntry. */ public UnaryCallSettings<DeleteEntryRequest, Empty> deleteEntrySettings() { return deleteEntrySettings; } /** Returns the object with the settings used for calls to getEntry. */ public UnaryCallSettings<GetEntryRequest, Entry> getEntrySettings() { return getEntrySettings; } /** Returns the object with the settings used for calls to lookupEntry. */ public UnaryCallSettings<LookupEntryRequest, Entry> lookupEntrySettings() { return lookupEntrySettings; } /** Returns the object with the settings used for calls to listEntries. */ public PagedCallSettings<ListEntriesRequest, ListEntriesResponse, ListEntriesPagedResponse> listEntriesSettings() { return listEntriesSettings; } /** Returns the object with the settings used for calls to modifyEntryOverview. */ public UnaryCallSettings<ModifyEntryOverviewRequest, EntryOverview> modifyEntryOverviewSettings() { return modifyEntryOverviewSettings; } /** Returns the object with the settings used for calls to modifyEntryContacts. */ public UnaryCallSettings<ModifyEntryContactsRequest, Contacts> modifyEntryContactsSettings() { return modifyEntryContactsSettings; } /** Returns the object with the settings used for calls to createTagTemplate. 
*/ public UnaryCallSettings<CreateTagTemplateRequest, TagTemplate> createTagTemplateSettings() { return createTagTemplateSettings; } /** Returns the object with the settings used for calls to getTagTemplate. */ public UnaryCallSettings<GetTagTemplateRequest, TagTemplate> getTagTemplateSettings() { return getTagTemplateSettings; } /** Returns the object with the settings used for calls to updateTagTemplate. */ public UnaryCallSettings<UpdateTagTemplateRequest, TagTemplate> updateTagTemplateSettings() { return updateTagTemplateSettings; } /** Returns the object with the settings used for calls to deleteTagTemplate. */ public UnaryCallSettings<DeleteTagTemplateRequest, Empty> deleteTagTemplateSettings() { return deleteTagTemplateSettings; } /** Returns the object with the settings used for calls to createTagTemplateField. */ public UnaryCallSettings<CreateTagTemplateFieldRequest, TagTemplateField> createTagTemplateFieldSettings() { return createTagTemplateFieldSettings; } /** Returns the object with the settings used for calls to updateTagTemplateField. */ public UnaryCallSettings<UpdateTagTemplateFieldRequest, TagTemplateField> updateTagTemplateFieldSettings() { return updateTagTemplateFieldSettings; } /** Returns the object with the settings used for calls to renameTagTemplateField. */ public UnaryCallSettings<RenameTagTemplateFieldRequest, TagTemplateField> renameTagTemplateFieldSettings() { return renameTagTemplateFieldSettings; } /** Returns the object with the settings used for calls to renameTagTemplateFieldEnumValue. */ public UnaryCallSettings<RenameTagTemplateFieldEnumValueRequest, TagTemplateField> renameTagTemplateFieldEnumValueSettings() { return renameTagTemplateFieldEnumValueSettings; } /** Returns the object with the settings used for calls to deleteTagTemplateField. 
*/ public UnaryCallSettings<DeleteTagTemplateFieldRequest, Empty> deleteTagTemplateFieldSettings() { return deleteTagTemplateFieldSettings; } /** Returns the object with the settings used for calls to createTag. */ public UnaryCallSettings<CreateTagRequest, Tag> createTagSettings() { return createTagSettings; } /** Returns the object with the settings used for calls to updateTag. */ public UnaryCallSettings<UpdateTagRequest, Tag> updateTagSettings() { return updateTagSettings; } /** Returns the object with the settings used for calls to deleteTag. */ public UnaryCallSettings<DeleteTagRequest, Empty> deleteTagSettings() { return deleteTagSettings; } /** Returns the object with the settings used for calls to listTags. */ public PagedCallSettings<ListTagsRequest, ListTagsResponse, ListTagsPagedResponse> listTagsSettings() { return listTagsSettings; } /** Returns the object with the settings used for calls to starEntry. */ public UnaryCallSettings<StarEntryRequest, StarEntryResponse> starEntrySettings() { return starEntrySettings; } /** Returns the object with the settings used for calls to unstarEntry. */ public UnaryCallSettings<UnstarEntryRequest, UnstarEntryResponse> unstarEntrySettings() { return unstarEntrySettings; } /** Returns the object with the settings used for calls to setIamPolicy. */ public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() { return setIamPolicySettings; } /** Returns the object with the settings used for calls to getIamPolicy. */ public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() { return getIamPolicySettings; } /** Returns the object with the settings used for calls to testIamPermissions. 
*/ public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings() { return testIamPermissionsSettings; } @BetaApi("A restructuring of stub classes is planned, so this may break in the future") public DataCatalogStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcDataCatalogStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ public static String getDefaultEndpoint() { return "datacatalog.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "datacatalog.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default ChannelProvider for this service. 
*/ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } @BetaApi("The surface for customizing headers is not stable yet and may change in the future.") public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(DataCatalogStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } /** Returns a new builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
   */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Freezes each per-RPC call-settings builder into this immutable settings object. */
  protected DataCatalogStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    // One .build() per RPC, in the same order as the Builder field declarations.
    searchCatalogSettings = settingsBuilder.searchCatalogSettings().build();
    createEntryGroupSettings = settingsBuilder.createEntryGroupSettings().build();
    getEntryGroupSettings = settingsBuilder.getEntryGroupSettings().build();
    updateEntryGroupSettings = settingsBuilder.updateEntryGroupSettings().build();
    deleteEntryGroupSettings = settingsBuilder.deleteEntryGroupSettings().build();
    listEntryGroupsSettings = settingsBuilder.listEntryGroupsSettings().build();
    createEntrySettings = settingsBuilder.createEntrySettings().build();
    updateEntrySettings = settingsBuilder.updateEntrySettings().build();
    deleteEntrySettings = settingsBuilder.deleteEntrySettings().build();
    getEntrySettings = settingsBuilder.getEntrySettings().build();
    lookupEntrySettings = settingsBuilder.lookupEntrySettings().build();
    listEntriesSettings = settingsBuilder.listEntriesSettings().build();
    modifyEntryOverviewSettings = settingsBuilder.modifyEntryOverviewSettings().build();
    modifyEntryContactsSettings = settingsBuilder.modifyEntryContactsSettings().build();
    createTagTemplateSettings = settingsBuilder.createTagTemplateSettings().build();
    getTagTemplateSettings = settingsBuilder.getTagTemplateSettings().build();
    updateTagTemplateSettings = settingsBuilder.updateTagTemplateSettings().build();
    deleteTagTemplateSettings = settingsBuilder.deleteTagTemplateSettings().build();
    createTagTemplateFieldSettings = settingsBuilder.createTagTemplateFieldSettings().build();
    updateTagTemplateFieldSettings = settingsBuilder.updateTagTemplateFieldSettings().build();
    renameTagTemplateFieldSettings = settingsBuilder.renameTagTemplateFieldSettings().build();
    renameTagTemplateFieldEnumValueSettings =
        settingsBuilder.renameTagTemplateFieldEnumValueSettings().build();
    deleteTagTemplateFieldSettings = settingsBuilder.deleteTagTemplateFieldSettings().build();
    createTagSettings = settingsBuilder.createTagSettings().build();
    updateTagSettings = settingsBuilder.updateTagSettings().build();
    deleteTagSettings = settingsBuilder.deleteTagSettings().build();
    listTagsSettings = settingsBuilder.listTagsSettings().build();
    starEntrySettings = settingsBuilder.starEntrySettings().build();
    unstarEntrySettings = settingsBuilder.unstarEntrySettings().build();
    setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
    getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
    testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
  }

  /** Builder for DataCatalogStubSettings. */
  public static class Builder extends StubSettings.Builder<DataCatalogStubSettings, Builder> {
    // All unary per-method builders, collected so applyToAllUnaryMethods can touch each one.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    // Per-RPC call-settings builders — Paged* for list/search RPCs, Unary* for the rest.
    private final PagedCallSettings.Builder<
            SearchCatalogRequest, SearchCatalogResponse, SearchCatalogPagedResponse>
        searchCatalogSettings;
    private final UnaryCallSettings.Builder<CreateEntryGroupRequest, EntryGroup>
        createEntryGroupSettings;
    private final UnaryCallSettings.Builder<GetEntryGroupRequest, EntryGroup>
        getEntryGroupSettings;
    private final UnaryCallSettings.Builder<UpdateEntryGroupRequest, EntryGroup>
        updateEntryGroupSettings;
    private final UnaryCallSettings.Builder<DeleteEntryGroupRequest, Empty>
        deleteEntryGroupSettings;
    private final PagedCallSettings.Builder<
            ListEntryGroupsRequest, ListEntryGroupsResponse, ListEntryGroupsPagedResponse>
        listEntryGroupsSettings;
    private final UnaryCallSettings.Builder<CreateEntryRequest, Entry> createEntrySettings;
    private final UnaryCallSettings.Builder<UpdateEntryRequest, Entry> updateEntrySettings;
    private final UnaryCallSettings.Builder<DeleteEntryRequest, Empty> deleteEntrySettings;
    private final UnaryCallSettings.Builder<GetEntryRequest, Entry> getEntrySettings;
    private final UnaryCallSettings.Builder<LookupEntryRequest, Entry> lookupEntrySettings;
    private final
        PagedCallSettings.Builder<
            ListEntriesRequest, ListEntriesResponse, ListEntriesPagedResponse>
        listEntriesSettings;
    private final UnaryCallSettings.Builder<ModifyEntryOverviewRequest, EntryOverview>
        modifyEntryOverviewSettings;
    private final UnaryCallSettings.Builder<ModifyEntryContactsRequest, Contacts>
        modifyEntryContactsSettings;
    private final UnaryCallSettings.Builder<CreateTagTemplateRequest, TagTemplate>
        createTagTemplateSettings;
    private final UnaryCallSettings.Builder<GetTagTemplateRequest, TagTemplate>
        getTagTemplateSettings;
    private final UnaryCallSettings.Builder<UpdateTagTemplateRequest, TagTemplate>
        updateTagTemplateSettings;
    private final UnaryCallSettings.Builder<DeleteTagTemplateRequest, Empty>
        deleteTagTemplateSettings;
    private final UnaryCallSettings.Builder<CreateTagTemplateFieldRequest, TagTemplateField>
        createTagTemplateFieldSettings;
    private final UnaryCallSettings.Builder<UpdateTagTemplateFieldRequest, TagTemplateField>
        updateTagTemplateFieldSettings;
    private final UnaryCallSettings.Builder<RenameTagTemplateFieldRequest, TagTemplateField>
        renameTagTemplateFieldSettings;
    private final UnaryCallSettings.Builder<
            RenameTagTemplateFieldEnumValueRequest, TagTemplateField>
        renameTagTemplateFieldEnumValueSettings;
    private final UnaryCallSettings.Builder<DeleteTagTemplateFieldRequest, Empty>
        deleteTagTemplateFieldSettings;
    private final UnaryCallSettings.Builder<CreateTagRequest, Tag> createTagSettings;
    private final UnaryCallSettings.Builder<UpdateTagRequest, Tag> updateTagSettings;
    private final UnaryCallSettings.Builder<DeleteTagRequest, Empty> deleteTagSettings;
    private final PagedCallSettings.Builder<
            ListTagsRequest, ListTagsResponse, ListTagsPagedResponse>
        listTagsSettings;
    private final UnaryCallSettings.Builder<StarEntryRequest, StarEntryResponse>
        starEntrySettings;
    private final UnaryCallSettings.Builder<UnstarEntryRequest, UnstarEntryResponse>
        unstarEntrySettings;
    private final UnaryCallSettings.Builder<SetIamPolicyRequest, Policy>
        setIamPolicySettings;
    private final UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings;
    private final UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsSettings;

    // Status codes each named retry policy retries on:
    // "retry_policy_3" retries only on UNAVAILABLE; "no_retry_0" never retries.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_3_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put(
          "no_retry_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Timing parameters for each named retry policy (delays/timeouts paired with the codes above).
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      // retry_policy_3: exponential backoff 100ms -> 60s (x1.3), 60s per-RPC and total timeout.
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(60000L))
              .setTotalTimeout(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_3_params", settings);
      // no_retry_0: single attempt with a 60s RPC/total timeout.
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeout(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(60000L))
              .setTotalTimeout(Duration.ofMillis(60000L))
              .build();
      definitions.put("no_retry_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      // Delegates with a null ClientContext (cast disambiguates the overload).
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);
      // Fresh per-RPC builders; paged RPCs get their page-streaming factory.
      searchCatalogSettings = PagedCallSettings.newBuilder(SEARCH_CATALOG_PAGE_STR_FACT);
      createEntryGroupSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getEntryGroupSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateEntryGroupSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteEntryGroupSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listEntryGroupsSettings = PagedCallSettings.newBuilder(LIST_ENTRY_GROUPS_PAGE_STR_FACT);
      createEntrySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateEntrySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteEntrySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getEntrySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      lookupEntrySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listEntriesSettings = PagedCallSettings.newBuilder(LIST_ENTRIES_PAGE_STR_FACT);
      modifyEntryOverviewSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      modifyEntryContactsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createTagTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getTagTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateTagTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteTagTemplateSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createTagTemplateFieldSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateTagTemplateFieldSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      renameTagTemplateFieldSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      renameTagTemplateFieldEnumValueSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteTagTemplateFieldSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createTagSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateTagSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteTagSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listTagsSettings = PagedCallSettings.newBuilder(LIST_TAGS_PAGE_STR_FACT);
      starEntrySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      unstarEntrySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      setIamPolicySettings =
          UnaryCallSettings.newUnaryCallSettingsBuilder();
      getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      // Register every unary builder so applyToAllUnaryMethods covers all RPCs.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              searchCatalogSettings,
              createEntryGroupSettings,
              getEntryGroupSettings,
              updateEntryGroupSettings,
              deleteEntryGroupSettings,
              listEntryGroupsSettings,
              createEntrySettings,
              updateEntrySettings,
              deleteEntrySettings,
              getEntrySettings,
              lookupEntrySettings,
              listEntriesSettings,
              modifyEntryOverviewSettings,
              modifyEntryContactsSettings,
              createTagTemplateSettings,
              getTagTemplateSettings,
              updateTagTemplateSettings,
              deleteTagTemplateSettings,
              createTagTemplateFieldSettings,
              updateTagTemplateFieldSettings,
              renameTagTemplateFieldSettings,
              renameTagTemplateFieldEnumValueSettings,
              deleteTagTemplateFieldSettings,
              createTagSettings,
              updateTagSettings,
              deleteTagSettings,
              listTagsSettings,
              starEntrySettings,
              unstarEntrySettings,
              setIamPolicySettings,
              getIamPolicySettings,
              testIamPermissionsSettings);
      initDefaults(this);
    }

    /** Copy constructor: re-opens every built setting from an existing settings object. */
    protected Builder(DataCatalogStubSettings settings) {
      super(settings);
      searchCatalogSettings = settings.searchCatalogSettings.toBuilder();
      createEntryGroupSettings = settings.createEntryGroupSettings.toBuilder();
      getEntryGroupSettings = settings.getEntryGroupSettings.toBuilder();
      updateEntryGroupSettings = settings.updateEntryGroupSettings.toBuilder();
      deleteEntryGroupSettings = settings.deleteEntryGroupSettings.toBuilder();
      listEntryGroupsSettings = settings.listEntryGroupsSettings.toBuilder();
      createEntrySettings = settings.createEntrySettings.toBuilder();
      updateEntrySettings = settings.updateEntrySettings.toBuilder();
      deleteEntrySettings = settings.deleteEntrySettings.toBuilder();
      getEntrySettings = settings.getEntrySettings.toBuilder();
      lookupEntrySettings = settings.lookupEntrySettings.toBuilder();
      listEntriesSettings = settings.listEntriesSettings.toBuilder();
      modifyEntryOverviewSettings = settings.modifyEntryOverviewSettings.toBuilder();
      modifyEntryContactsSettings = settings.modifyEntryContactsSettings.toBuilder();
      createTagTemplateSettings = settings.createTagTemplateSettings.toBuilder();
      getTagTemplateSettings = settings.getTagTemplateSettings.toBuilder();
      updateTagTemplateSettings = settings.updateTagTemplateSettings.toBuilder();
      deleteTagTemplateSettings = settings.deleteTagTemplateSettings.toBuilder();
      createTagTemplateFieldSettings = settings.createTagTemplateFieldSettings.toBuilder();
      updateTagTemplateFieldSettings = settings.updateTagTemplateFieldSettings.toBuilder();
      renameTagTemplateFieldSettings = settings.renameTagTemplateFieldSettings.toBuilder();
      renameTagTemplateFieldEnumValueSettings =
          settings.renameTagTemplateFieldEnumValueSettings.toBuilder();
      deleteTagTemplateFieldSettings = settings.deleteTagTemplateFieldSettings.toBuilder();
      createTagSettings = settings.createTagSettings.toBuilder();
      updateTagSettings = settings.updateTagSettings.toBuilder();
      deleteTagSettings = settings.deleteTagSettings.toBuilder();
      listTagsSettings = settings.listTagsSettings.toBuilder();
      starEntrySettings = settings.starEntrySettings.toBuilder();
      unstarEntrySettings = settings.unstarEntrySettings.toBuilder();
      setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
      getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
      testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();
      // Same registration list as the ClientContext constructor, in the same order.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              searchCatalogSettings,
              createEntryGroupSettings,
              getEntryGroupSettings,
              updateEntryGroupSettings,
              deleteEntryGroupSettings,
              listEntryGroupsSettings,
              createEntrySettings,
              updateEntrySettings,
              deleteEntrySettings,
              getEntrySettings,
              lookupEntrySettings,
              listEntriesSettings,
              modifyEntryOverviewSettings,
              modifyEntryContactsSettings,
              createTagTemplateSettings,
              getTagTemplateSettings,
              updateTagTemplateSettings,
              deleteTagTemplateSettings,
              createTagTemplateFieldSettings,
              updateTagTemplateFieldSettings,
              renameTagTemplateFieldSettings,
              renameTagTemplateFieldEnumValueSettings,
              deleteTagTemplateFieldSettings,
              createTagSettings,
              updateTagSettings,
              deleteTagSettings,
              listTagsSettings,
              starEntrySettings,
              unstarEntrySettings,
              setIamPolicySettings,
              getIamPolicySettings,
              testIamPermissionsSettings);
    }

    /** Builds a Builder preloaded with default endpoint, credentials, headers, and retries. */
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }

    /**
     * Applies the per-RPC default retry policy: reads ("get"/"list"/"search"/"lookup") use
     * retry_policy_3 (retry on UNAVAILABLE), mutations use no_retry_0 (no retries).
     */
    private static Builder initDefaults(Builder builder) {
      builder
          .searchCatalogSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .createEntryGroupSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .getEntryGroupSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .updateEntryGroupSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .deleteEntryGroupSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .listEntryGroupsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .createEntrySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .updateEntrySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .deleteEntrySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      // Read-only entry RPCs are retryable.
      builder
          .getEntrySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .lookupEntrySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .listEntriesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .modifyEntryOverviewSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .modifyEntryContactsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .createTagTemplateSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      // NOTE(review): getTagTemplate is configured as non-retryable, unlike the other reads.
      builder
          .getTagTemplateSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .updateTagTemplateSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .deleteTagTemplateSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .createTagTemplateFieldSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .updateTagTemplateFieldSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .renameTagTemplateFieldSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .renameTagTemplateFieldEnumValueSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .deleteTagTemplateFieldSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .createTagSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .updateTagSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .deleteTagSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .listTagsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .starEntrySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .unstarEntrySettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params")); builder .setIamPolicySettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params")); builder .getIamPolicySettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params")); builder .testIamPermissionsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params")); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** Returns the builder for the settings used for calls to searchCatalog. */ public PagedCallSettings.Builder< SearchCatalogRequest, SearchCatalogResponse, SearchCatalogPagedResponse> searchCatalogSettings() { return searchCatalogSettings; } /** Returns the builder for the settings used for calls to createEntryGroup. */ public UnaryCallSettings.Builder<CreateEntryGroupRequest, EntryGroup> createEntryGroupSettings() { return createEntryGroupSettings; } /** Returns the builder for the settings used for calls to getEntryGroup. */ public UnaryCallSettings.Builder<GetEntryGroupRequest, EntryGroup> getEntryGroupSettings() { return getEntryGroupSettings; } /** Returns the builder for the settings used for calls to updateEntryGroup. 
*/ public UnaryCallSettings.Builder<UpdateEntryGroupRequest, EntryGroup> updateEntryGroupSettings() { return updateEntryGroupSettings; } /** Returns the builder for the settings used for calls to deleteEntryGroup. */ public UnaryCallSettings.Builder<DeleteEntryGroupRequest, Empty> deleteEntryGroupSettings() { return deleteEntryGroupSettings; } /** Returns the builder for the settings used for calls to listEntryGroups. */ public PagedCallSettings.Builder< ListEntryGroupsRequest, ListEntryGroupsResponse, ListEntryGroupsPagedResponse> listEntryGroupsSettings() { return listEntryGroupsSettings; } /** Returns the builder for the settings used for calls to createEntry. */ public UnaryCallSettings.Builder<CreateEntryRequest, Entry> createEntrySettings() { return createEntrySettings; } /** Returns the builder for the settings used for calls to updateEntry. */ public UnaryCallSettings.Builder<UpdateEntryRequest, Entry> updateEntrySettings() { return updateEntrySettings; } /** Returns the builder for the settings used for calls to deleteEntry. */ public UnaryCallSettings.Builder<DeleteEntryRequest, Empty> deleteEntrySettings() { return deleteEntrySettings; } /** Returns the builder for the settings used for calls to getEntry. */ public UnaryCallSettings.Builder<GetEntryRequest, Entry> getEntrySettings() { return getEntrySettings; } /** Returns the builder for the settings used for calls to lookupEntry. */ public UnaryCallSettings.Builder<LookupEntryRequest, Entry> lookupEntrySettings() { return lookupEntrySettings; } /** Returns the builder for the settings used for calls to listEntries. */ public PagedCallSettings.Builder< ListEntriesRequest, ListEntriesResponse, ListEntriesPagedResponse> listEntriesSettings() { return listEntriesSettings; } /** Returns the builder for the settings used for calls to modifyEntryOverview. 
*/ public UnaryCallSettings.Builder<ModifyEntryOverviewRequest, EntryOverview> modifyEntryOverviewSettings() { return modifyEntryOverviewSettings; } /** Returns the builder for the settings used for calls to modifyEntryContacts. */ public UnaryCallSettings.Builder<ModifyEntryContactsRequest, Contacts> modifyEntryContactsSettings() { return modifyEntryContactsSettings; } /** Returns the builder for the settings used for calls to createTagTemplate. */ public UnaryCallSettings.Builder<CreateTagTemplateRequest, TagTemplate> createTagTemplateSettings() { return createTagTemplateSettings; } /** Returns the builder for the settings used for calls to getTagTemplate. */ public UnaryCallSettings.Builder<GetTagTemplateRequest, TagTemplate> getTagTemplateSettings() { return getTagTemplateSettings; } /** Returns the builder for the settings used for calls to updateTagTemplate. */ public UnaryCallSettings.Builder<UpdateTagTemplateRequest, TagTemplate> updateTagTemplateSettings() { return updateTagTemplateSettings; } /** Returns the builder for the settings used for calls to deleteTagTemplate. */ public UnaryCallSettings.Builder<DeleteTagTemplateRequest, Empty> deleteTagTemplateSettings() { return deleteTagTemplateSettings; } /** Returns the builder for the settings used for calls to createTagTemplateField. */ public UnaryCallSettings.Builder<CreateTagTemplateFieldRequest, TagTemplateField> createTagTemplateFieldSettings() { return createTagTemplateFieldSettings; } /** Returns the builder for the settings used for calls to updateTagTemplateField. */ public UnaryCallSettings.Builder<UpdateTagTemplateFieldRequest, TagTemplateField> updateTagTemplateFieldSettings() { return updateTagTemplateFieldSettings; } /** Returns the builder for the settings used for calls to renameTagTemplateField. 
*/ public UnaryCallSettings.Builder<RenameTagTemplateFieldRequest, TagTemplateField> renameTagTemplateFieldSettings() { return renameTagTemplateFieldSettings; } /** Returns the builder for the settings used for calls to renameTagTemplateFieldEnumValue. */ public UnaryCallSettings.Builder<RenameTagTemplateFieldEnumValueRequest, TagTemplateField> renameTagTemplateFieldEnumValueSettings() { return renameTagTemplateFieldEnumValueSettings; } /** Returns the builder for the settings used for calls to deleteTagTemplateField. */ public UnaryCallSettings.Builder<DeleteTagTemplateFieldRequest, Empty> deleteTagTemplateFieldSettings() { return deleteTagTemplateFieldSettings; } /** Returns the builder for the settings used for calls to createTag. */ public UnaryCallSettings.Builder<CreateTagRequest, Tag> createTagSettings() { return createTagSettings; } /** Returns the builder for the settings used for calls to updateTag. */ public UnaryCallSettings.Builder<UpdateTagRequest, Tag> updateTagSettings() { return updateTagSettings; } /** Returns the builder for the settings used for calls to deleteTag. */ public UnaryCallSettings.Builder<DeleteTagRequest, Empty> deleteTagSettings() { return deleteTagSettings; } /** Returns the builder for the settings used for calls to listTags. */ public PagedCallSettings.Builder<ListTagsRequest, ListTagsResponse, ListTagsPagedResponse> listTagsSettings() { return listTagsSettings; } /** Returns the builder for the settings used for calls to starEntry. */ public UnaryCallSettings.Builder<StarEntryRequest, StarEntryResponse> starEntrySettings() { return starEntrySettings; } /** Returns the builder for the settings used for calls to unstarEntry. */ public UnaryCallSettings.Builder<UnstarEntryRequest, UnstarEntryResponse> unstarEntrySettings() { return unstarEntrySettings; } /** Returns the builder for the settings used for calls to setIamPolicy. 
*/ public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() { return setIamPolicySettings; } /** Returns the builder for the settings used for calls to getIamPolicy. */ public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() { return getIamPolicySettings; } /** Returns the builder for the settings used for calls to testIamPermissions. */ public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse> testIamPermissionsSettings() { return testIamPermissionsSettings; } @Override public DataCatalogStubSettings build() throws IOException { return new DataCatalogStubSettings(this); } } }
package com.jetbrains.env.python;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.testFramework.UsefulTestCase;
import com.intellij.testFramework.fixtures.IdeaProjectTestFixture;
import com.intellij.xdebugger.XDebuggerTestUtil;
import com.intellij.xdebugger.breakpoints.SuspendPolicy;
import com.jetbrains.TestEnv;
import com.jetbrains.env.PyEnvTestCase;
import com.jetbrains.env.PyProcessWithConsoleTestTask;
import com.jetbrains.env.Staging;
import com.jetbrains.env.StagingOn;
import com.jetbrains.env.python.debug.PyDebuggerTask;
import com.jetbrains.env.ut.PyUnitTestProcessRunner;
import com.jetbrains.python.PythonHelpersLocator;
import com.jetbrains.python.console.pydev.PydevCompletionVariant;
import com.jetbrains.python.debugger.PyDebugValue;
import com.jetbrains.python.debugger.PyDebuggerException;
import com.jetbrains.python.debugger.PyExceptionBreakpointProperties;
import com.jetbrains.python.debugger.PyExceptionBreakpointType;
import com.jetbrains.python.debugger.pydev.PyDebugCallback;
import com.jetbrains.python.debugger.settings.PyDebuggerSettings;
import com.jetbrains.python.debugger.settings.PySteppingFilter;
import com.jetbrains.python.sdk.flavors.PythonSdkFlavor;
import com.jetbrains.python.sdkTools.SdkCreationType;
import org.jetbrains.annotations.NotNull;
import org.junit.Test;

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

/**
 * Environment tests for the Python debugger: each test launches a small Python
 * script under the debugger (via {@link PyDebuggerTask}), sets breakpoints in
 * {@code before()} and drives/inspects the paused process in {@code testing()}.
 *
 * @author traff
 */
public class PythonDebuggerTest extends PyEnvTestCase {
  // Basic stop-at-breakpoint plus expression evaluation across three loop iterations.
  @Test
  public void testBreakpointStopAndEval() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test1.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 3);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("i").hasValue("0");
        resume();
        waitForPause();
        eval("i").hasValue("1");
        resume();
        waitForPause();
        eval("i").hasValue("2");
      }
    });
  }

  // Runs pydevd's own Python unit-test suites through the IDE test harness.
  @Test
  @Staging
  public void testPydevTests_Debugger() {
    unittests("tests_pydevd_python/test_debugger.py");
  }

  @Test
  @Staging
  public void testPydevMonkey() {
    unittests("tests_pydevd_python/test_pydev_monkey.py");
  }

  // Shared driver: runs the given helper-side unittest script and asserts all tests pass.
  private void unittests(final String script) {
    runPythonTest(
      new PyProcessWithConsoleTestTask<PyUnitTestProcessRunner>("/helpers/pydev",
                                                                SdkCreationType.SDK_PACKAGES_ONLY) {
        @NotNull
        @Override
        protected PyUnitTestProcessRunner createProcessRunner() throws Exception {
          return new PyUnitTestProcessRunner(script, 0);
        }

        @NotNull
        @Override
        public String getTestDataPath() {
          return PythonHelpersLocator.getPythonCommunityPath();
        }

        @Override
        protected void checkTestResults(@NotNull final PyUnitTestProcessRunner runner,
                                        @NotNull final String stdout,
                                        @NotNull final String stderr,
                                        @NotNull final String all) {
          runner.assertAllTestsPassed();
        }
      });
  }

  // The breakpoint condition should suppress all stops except i == 1, 11 and 111.
  @Test
  public void testConditionalBreakpoint() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test1.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 3);
        XDebuggerTestUtil.setBreakpointCondition(getProject(), 3, "i == 1 or i == 11 or i == 111");
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("i").hasValue("1");
        resume();
        waitForPause();
        eval("i").hasValue("11");
        resume();
        waitForPause();
        eval("i").hasValue("111");
      }
    });
  }

  // Exercises the interactive debug console: valid expression, NameError and SyntaxError output.
  @Test
  public void testDebugConsole() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test1.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 3);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("i").hasValue("0");
        resume();
        waitForPause();
        consoleExec("'i=%d'%i");
        waitForOutput("'i=1'");
        consoleExec("x");
        waitForOutput("name 'x' is not defined");
        consoleExec("1-;");
        waitForOutput("SyntaxError");
        resume();
      }

      // Fire-and-forget console command; results are checked via waitForOutput(),
      // so both callbacks are intentionally empty.
      private void consoleExec(String command) {
        myDebugProcess.consoleExec(command, new PyDebugCallback<String>() {
          @Override
          public void ok(String value) {
          }

          @Override
          public void error(PyDebuggerException exception) {
          }
        });
      }
    });
  }

  // Completion on a prefix while paused should offer exactly two variants.
  @Test
  public void testDebugCompletion() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test4.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 3);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        List<PydevCompletionVariant> list = myDebugProcess.getCompletions("xvalu");
        assertEquals(2, list.size());
      }
    });
  }

  // A log expression on a breakpoint prints to output instead of stopping usefully.
  @Test
  public void testBreakpointLogExpression() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test1.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 3);
        XDebuggerTestUtil.setBreakpointLogExpression(getProject(), 3, "'i = %d'%i");
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        resume();
        waitForOutput("i = 1");
      }
    });
  }

  @Test
  public void testStepOver() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test2.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 5);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        stepOver();
        waitForPause();
        stepOver();
        waitForPause();
        eval("z").hasValue("2");
      }
    });
  }

  @Test
  @StagingOn(os = TestEnv.WINDOWS)
  public void testStepInto() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test2.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 5);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        stepInto();
        waitForPause();
        eval("x").hasValue("1");
        stepOver();
        waitForPause();
        eval("y").hasValue("3");
        stepOver();
        waitForPause();
        eval("z").hasValue("1");
      }
    });
  }

  // "Step into my code" must skip library frames and land only in user files.
  @Test
  public void testStepIntoMyCode() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_my_code.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 5);
        toggleBreakpoint(getFilePath(getScriptName()), 7);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        stepIntoMyCode();
        waitForPause();
        eval("x").hasValue("2");
        resume();
        waitForPause();
        eval("x").hasValue("3");
        stepIntoMyCode();
        waitForPause();
        eval("stopped_in_user_file").hasValue("True");
      }
    });
  }

  @Test
  public void testSmartStepInto() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test3.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 14);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        smartStepInto("foo");
        waitForPause();
        stepOver();
        waitForPause();
        eval("y").hasValue("4");
      }
    });
  }

  @Test
  public void testSmartStepInto2() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test3.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 18);
        toggleBreakpoint(getFilePath(getScriptName()), 25);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        // Toggle again — presumably removes the line-18 breakpoint so it cannot
        // interfere with the smart-step-into target; TODO confirm toggle semantics.
        toggleBreakpoint(getFilePath(getScriptName()), 18);
        smartStepInto("foo");
        waitForPause();
        eval("a.z").hasValue("1");
      }
    });
  }

  // Feeds stdin to the debuggee and checks it is echoed back.
  @Test
  public void testInput() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_input.py") {
      @Override
      public void testing() throws Exception {
        waitForOutput("print command >");
        input("GO!");
        waitForOutput("command was GO!");
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-jython"); //can't run on jython
      }
    });
  }

  @Test
  @StagingOn(os = TestEnv.WINDOWS)
  public void testRunToLine() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_runtoline.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 3);
        toggleBreakpoint(getFilePath(getScriptName()), 9);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("x").hasValue("0");
        runToLine(6);
        eval("x").hasValue("1");
        resume();
        waitForPause();
        eval("x").hasValue("12");
        resume();
        waitForOutput("x = 12");
      }
    });
  }

  // Registers an exception breakpoint of the Python-specific breakpoint type.
  private static void addExceptionBreakpoint(IdeaProjectTestFixture fixture,
                                             PyExceptionBreakpointProperties properties) {
    XDebuggerTestUtil.addBreakpoint(fixture.getProject(), PyExceptionBreakpointType.class, properties);
  }

  @Test
  public void testExceptionBreakpointOnTerminate() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_exceptbreak.py") {
      @Override
      public void before() throws Exception {
        createExceptionBreakZeroDivisionError(myFixture, true, false, false);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("__exception__[0].__name__").hasValue("'ZeroDivisionError'");
        resume();
        waitForTerminate();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-iron");
      }
    });
  }

  // Installs ZeroDivisionError exception breakpoints; registered under both the
  // Python 2 ("exceptions.") and Python 3 ("builtins.") qualified names.
  private static void createExceptionBreakZeroDivisionError(IdeaProjectTestFixture fixture,
                                                            boolean notifyOnTerminate,
                                                            boolean notifyOnFirst,
                                                            boolean ignoreLibraries) {
    XDebuggerTestUtil.removeAllBreakpoints(fixture.getProject());
    XDebuggerTestUtil.setDefaultBreakpointEnabled(fixture.getProject(), PyExceptionBreakpointType.class, false);

    PyExceptionBreakpointProperties properties = new PyExceptionBreakpointProperties("exceptions.ZeroDivisionError");
    properties.setNotifyOnTerminate(notifyOnTerminate);
    properties.setNotifyOnlyOnFirst(notifyOnFirst);
    properties.setIgnoreLibraries(ignoreLibraries);
    addExceptionBreakpoint(fixture, properties);
    properties = new PyExceptionBreakpointProperties("builtins.ZeroDivisionError"); //for python 3
    properties.setNotifyOnTerminate(notifyOnTerminate);
    properties.setNotifyOnlyOnFirst(notifyOnFirst);
    properties.setIgnoreLibraries(ignoreLibraries);
    addExceptionBreakpoint(fixture, properties);
  }

  @Test
  public void testExceptionBreakpointOnFirstRaise() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_exceptbreak.py") {
      @Override
      public void before() throws Exception {
        createExceptionBreakZeroDivisionError(myFixture, false, true, false);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("__exception__[0].__name__").hasValue("'ZeroDivisionError'");
        resume();
        waitForTerminate();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-iron");
      }
    });
  }

  // Catch-all BaseException breakpoint; shared with other env test classes (public static).
  public static void createExceptionBreak(IdeaProjectTestFixture fixture,
                                          boolean notifyOnTerminate,
                                          boolean notifyOnFirst,
                                          boolean ignoreLibraries) {
    XDebuggerTestUtil.removeAllBreakpoints(fixture.getProject());
    XDebuggerTestUtil.setDefaultBreakpointEnabled(fixture.getProject(), PyExceptionBreakpointType.class, false);
    PyExceptionBreakpointProperties properties = new PyExceptionBreakpointProperties("BaseException");
    properties.setNotifyOnTerminate(notifyOnTerminate);
    properties.setNotifyOnlyOnFirst(notifyOnFirst);
    properties.setIgnoreLibraries(ignoreLibraries);
    addExceptionBreakpoint(fixture, properties);
  }

  @Test
  public void testExceptionBreakpointIgnoreLibrariesOnRaise() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_ignore_lib.py") {
      @Override
      public void before() throws Exception {
        createExceptionBreak(myFixture, false, true, true);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("stopped_in_user_file").hasValue("True");
        resume();
        waitForTerminate();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-jython");
      }
    });
  }

  @Test
  public void testExceptionBreakpointIgnoreLibrariesOnTerminate() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_ignore_lib.py") {
      @Override
      public void before() throws Exception {
        createExceptionBreak(myFixture, true, false, true);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("stopped_in_user_file").hasValue("True");
        resume();
        waitForTerminate();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-iron");
      }
    });
  }

  @Test
  public void testMultithreading() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_multithread.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 10);
        toggleBreakpoint(getFilePath(getScriptName()), 16);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("y").hasValue("2");
        resume();
        waitForPause();
        eval("z").hasValue("102");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-pypy"); //TODO: fix that for PyPy
      }
    });
  }

  // Debugs code inside an .egg archive; the egg is added to PYTHONPATH first.
  @Test
  @StagingOn(os = TestEnv.WINDOWS)
  public void testEggDebug() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_egg.py") {
      @Override
      public void before() throws Exception {
        String egg = getFilePath("Adder-0.1.egg");
        toggleBreakpointInEgg(egg, "adder/adder.py", 2);
        PythonSdkFlavor flavor = PythonSdkFlavor.getFlavor(getRunConfiguration().getSdkHome());
        if (flavor != null) {
          flavor.initPythonPath(Lists.newArrayList(egg), getRunConfiguration().getEnvs());
        }
        else {
          getRunConfiguration().getEnvs().put("PYTHONPATH", egg);
        }
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("ret").hasValue("16");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-jython"); //TODO: fix that for Jython if anybody needs it
      }
    });
  }

  @Test
  public void testWinEggDebug() throws Exception {
    if (UsefulTestCase.IS_UNDER_TEAMCITY && !SystemInfo.isWindows) {
      return; // Only needs to run on windows
    }
    runPythonTest(new PyDebuggerTask("/debug", "test_winegg.py") {
      @Override
      public void before() throws Exception {
        String egg = getFilePath("wintestegg-0.1.egg");
        toggleBreakpointInEgg(egg, "eggxample/lower_case.py", 2);
        toggleBreakpointInEgg(egg, "eggxample/MIXED_case.py", 2);
        PythonSdkFlavor flavor = PythonSdkFlavor.getFlavor(getRunConfiguration().getSdkHome());
        if (flavor != null) {
          flavor.initPythonPath(Lists.newArrayList(egg), getRunConfiguration().getEnvs());
        }
        else {
          getRunConfiguration().getEnvs().put("PYTHONPATH", egg);
        }
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("ret").hasValue("16");
        resume();
        waitForPause();
        eval("ret").hasValue("17");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-jython"); //TODO: fix that for Jython if anybody needs it
      }
    });
  }

  // Windows 8.3 short names ("long_n~1.py") must resolve to the canonical long path.
  @Test
  @StagingOn(os = TestEnv.WINDOWS)
  public void testWinLongName() throws Exception {
    if (UsefulTestCase.IS_UNDER_TEAMCITY && !SystemInfo.isWindows) {
      return; // Only needs to run on windows
    }
    runPythonTest(new PyDebuggerTask("/debug", "long_n~1.py") {
      @Override
      public void before() throws Exception {
        String scriptPath = getScriptName();
        String longPath = FileUtil.toSystemDependentName((new File(scriptPath).getCanonicalPath()));
        LocalFileSystem.getInstance().refreshAndFindFileByPath(longPath);
        toggleBreakpoint(longPath, 2);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("x").hasValue("10");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-jython"); //TODO: fix that for Jython if anybody needs it
      }
    });
  }

  // Stepping over must not falsely stop on a conditional breakpoint whose condition is unmet.
  @Test
  public void testStepOverConditionalBreakpoint() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_stepOverCondition.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 1);
        toggleBreakpoint(getScriptName(), 2);
        XDebuggerTestUtil.setBreakpointCondition(getProject(), 2, "y == 3");
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        stepOver();
        waitForPause();
        eval("y").hasValue("2");
      }
    });
  }

  @Test
  public void testMultiprocess() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_multiprocess.py") {
      @Override
      protected void init() {
        setMultiprocessDebug(true);
      }

      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 9);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("i").hasValue("'Result:OK'");
        resume();
        waitForOutput("Result:OK");
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return Sets.newHashSet("python3");
      }
    });
  }

  // Breakpoint set in a file executed by a spawned subprocess; checks argv forwarding.
  @Test
  @Staging
  public void testMultiprocessingSubprocess() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_multiprocess_args.py") {
      @Override
      protected void init() {
        setMultiprocessDebug(true);
      }

      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath("test_remote.py"), 2);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("sys.argv[1]").hasValue("'subprocess'");
        eval("sys.argv[2]").hasValue("'etc etc'");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-iron", "-jython"); //can't run on iron and jython
      }
    });
  }

  @Test
  @Staging
  public void testPyQtQThreadInheritor() throws Exception {
    if (UsefulTestCase.IS_UNDER_TEAMCITY && SystemInfo.isWindows) {
      return; //Don't run under Windows
    }
    runPythonTest(new PyDebuggerTask("/debug", "test_pyqt1.py") {
      @Override
      protected void init() {
        setMultiprocessDebug(true);
      }

      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 8);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("i").hasValue("0");
        resume();
        waitForPause();
        eval("i").hasValue("1");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return Sets.newHashSet("pyqt5");
      }
    });
  }

  @Test
  @Staging
  public void testPyQtMoveToThread() throws Exception {
    if (UsefulTestCase.IS_UNDER_TEAMCITY && SystemInfo.isWindows) {
      return; //Don't run under Windows
    }
    runPythonTest(new PyDebuggerTask("/debug", "test_pyqt2.py") {
      @Override
      protected void init() {
        setMultiprocessDebug(true);
      }

      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 10);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("i").hasValue("0");
        resume();
        waitForPause();
        eval("i").hasValue("1");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return Sets.newHashSet("pyqt5");
      }
    });
  }

  @Test
  @Staging
  public void testPyQtQRunnableInheritor() throws Exception {
    if (UsefulTestCase.IS_UNDER_TEAMCITY && SystemInfo.isWindows) {
      return; //Don't run under Windows
    }
    runPythonTest(new PyDebuggerTask("/debug", "test_pyqt3.py") {
      @Override
      protected void init() {
        setMultiprocessDebug(true);
      }

      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 9);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("i").hasValue("0");
        resume();
        waitForPause();
        eval("i").hasValue("1");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return Sets.newHashSet("pyqt5");
      }
    });
  }

  // "yield from" delegation must be stepped over as a single statement (Python 3.4+).
  @Test
  public void testStepOverYieldFrom() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_step_over_yield.py") {
      @Override
      protected void init() {
        setMultiprocessDebug(true);
      }

      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 6);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        stepOver();
        waitForPause();
        eval("a").hasValue("42");
        stepOver();
        waitForPause();
        eval("a").hasValue("42");
        stepOver();
        waitForPause();
        eval("sum").hasValue("6");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return Sets.newHashSet("python34");
      }
    });
  }

  // Step-into must skip files matched by the stepping filter; settings are
  // global, hence restored in doFinally().
  @Test
  public void testSteppingFilter() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_stepping_filter.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 4);
        List<PySteppingFilter> filters = new ArrayList<>();
        filters.add(new PySteppingFilter(true, "*/test_m?_code.py"));
        final PyDebuggerSettings debuggerSettings = PyDebuggerSettings.getInstance();
        debuggerSettings.setLibrariesFilterEnabled(true);
        debuggerSettings.setSteppingFiltersEnabled(true);
        debuggerSettings.setSteppingFilters(filters);
      }

      @Override
      public void doFinally() {
        final PyDebuggerSettings debuggerSettings = PyDebuggerSettings.getInstance();
        debuggerSettings.setLibrariesFilterEnabled(false);
        debuggerSettings.setSteppingFiltersEnabled(false);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        stepInto();
        waitForPause();
        eval("stopped_in_user_file").hasValue("True");
        stepInto();
        waitForPause();
        eval("stopped_in_user_file").hasValue("True");
      }
    });
  }

  // Return values of finished calls are exposed under PyDebugValue.RETURN_VALUES_PREFIX.
  @Test
  public void testReturnValues() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_return_values.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 7);
        toggleBreakpoint(getScriptName(), 11);
        final PyDebuggerSettings debuggerSettings = PyDebuggerSettings.getInstance();
        debuggerSettings.setWatchReturnValues(true);
      }

      @Override
      public void doFinally() {
        final PyDebuggerSettings debuggerSettings = PyDebuggerSettings.getInstance();
        debuggerSettings.setWatchReturnValues(false);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval(PyDebugValue.RETURN_VALUES_PREFIX + "['bar'][0]").hasValue("1");
        resume();
        waitForPause();
        eval(PyDebugValue.RETURN_VALUES_PREFIX + "['foo']").hasValue("33");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-iron");
      }
    });
  }

  // SuspendPolicy.ALL: hitting the breakpoint must suspend every thread.
  @Test
  @Staging
  public void testSuspendAllThreadsPolicy() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_two_threads.py") {
      @Override
      protected void init() {
        setMultiprocessDebug(true);
      }

      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 12);
        setBreakpointSuspendPolicy(getProject(), 12, SuspendPolicy.ALL);
      }

      @Override
      public void testing() throws Exception {
        waitForAllThreadsPause();
        eval("m").hasValue("42");
        assertNull(getRunningThread());
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-iron");
      }
    });
  }

  @Test
  @Staging
  public void testSuspendAllThreadsResume() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_two_threads_resume.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 10);
        setBreakpointSuspendPolicy(getProject(), 10, SuspendPolicy.ALL);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("x").hasValue("12");
        resume();
        waitForPause();
        eval("x").hasValue("12");
        resume();
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-iron");
      }
    });
  }

  // SuspendPolicy.THREAD: only the hitting thread suspends; the other keeps running.
  @Test
  @Staging
  public void testSuspendOneThreadPolicy() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_two_threads.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 12);
        setBreakpointSuspendPolicy(getProject(), 12, SuspendPolicy.THREAD);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("m").hasValue("42");
        assertEquals("Thread1", getRunningThread());
        resume();
      }
    });
  }

  @Test
  @Staging
  public void testShowReferringObjects() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_ref.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getFilePath(getScriptName()), 3);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        int numberOfReferringObjects = getNumberOfReferringObjects("l");
        assertEquals(3, numberOfReferringObjects);
      }

      @NotNull
      @Override
      public Set<String> getTags() {
        return ImmutableSet.of("-iron");
      }
    });
  }

  @Staging
  @Test
  public void testResume() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_resume.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 1);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("x").hasValue("1");
        resume();
        waitForPause();
        eval("x").hasValue("2");
        resume();
      }
    });
  }

  //TODO: That doesn't work now: case from test_continuation.py and test_continuation2.py are treated differently by interpreter
  // (first line is executed in first case and last line in second)
  @Staging
  @Test
  public void testBreakOnContinuationLine() throws Exception {
    runPythonTest(new PyDebuggerTask("/debug", "test_continuation.py") {
      @Override
      public void before() throws Exception {
        toggleBreakpoint(getScriptName(), 13);
      }

      @Override
      public void testing() throws Exception {
        waitForPause();
        eval("x").hasValue("0");
        stepOver();
        waitForPause();
        eval("x").hasValue("1");
        stepOver();
        waitForPause();
        eval("x").hasValue("2");
      }
    });
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.dubbo.remoting.handler;

import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.remoting.Channel;
import com.alibaba.dubbo.remoting.RemotingException;
import com.alibaba.dubbo.remoting.exchange.ExchangeChannel;
import com.alibaba.dubbo.remoting.exchange.ExchangeHandler;
import com.alibaba.dubbo.remoting.exchange.Request;
import com.alibaba.dubbo.remoting.exchange.Response;
import com.alibaba.dubbo.remoting.exchange.support.header.HeaderExchangeHandler;

import org.junit.Assert;
import org.junit.Test;

import java.util.concurrent.atomic.AtomicInteger;

/**
 * Unit tests for {@link HeaderExchangeHandler}: verifies how received requests
 * (oneway, twoway, broken, events) are dispatched to the inner
 * {@link ExchangeHandler} and what responses are written back to the channel.
 */
//TODO response test
public class HeaderExchangeHandlerTest {

    /** A oneway request is delivered to received() and no response is sent. */
    @Test
    public void test_received_request_oneway() throws RemotingException {
        final Channel mchannel = new MockedChannel();
        final Person requestdata = new Person("charles");
        Request request = new Request();
        request.setTwoWay(false);
        request.setData(requestdata);

        ExchangeHandler exhandler = new MockedExchangeHandler() {
            @Override
            public void received(Channel channel, Object message) throws RemotingException {
                Assert.assertEquals(requestdata, message);
            }
        };
        HeaderExchangeHandler hexhandler = new HeaderExchangeHandler(exhandler);
        hexhandler.received(mchannel, request);
    }

    /** A twoway request goes through reply() and an OK response echoing id/version is sent. */
    @Test
    public void test_received_request_twoway() throws RemotingException {
        final Person requestdata = new Person("charles");
        final Request request = new Request();
        request.setTwoWay(true);
        request.setData(requestdata);

        // Counts send() invocations so we can assert exactly one response was written.
        final AtomicInteger count = new AtomicInteger(0);
        final Channel mchannel = new MockedChannel() {
            @Override
            public void send(Object message) throws RemotingException {
                Response res = (Response) message;
                Assert.assertEquals(request.getId(), res.getId());
                Assert.assertEquals(request.getVersion(), res.getVersion());
                Assert.assertEquals(Response.OK, res.getStatus());
                Assert.assertEquals(requestdata, res.getResult());
                Assert.assertNull(res.getErrorMessage());
                count.incrementAndGet();
            }
        };
        ExchangeHandler exhandler = new MockedExchangeHandler() {
            @Override
            public Object reply(ExchangeChannel channel, Object request) throws RemotingException {
                return request;
            }

            @Override
            public void received(Channel channel, Object message) throws RemotingException {
                // Twoway requests must be answered via reply(), never received().
                Assert.fail();
            }
        };
        HeaderExchangeHandler hexhandler = new HeaderExchangeHandler(exhandler);
        hexhandler.received(mchannel, request);
        Assert.assertEquals(1, count.get());
    }

    /** Constructing the handler with a null delegate must be rejected. */
    @Test(expected = IllegalArgumentException.class)
    public void test_received_request_twoway_error_nullhandler() throws RemotingException {
        new HeaderExchangeHandler(null);
    }

    /** When reply() throws, a SERVICE_ERROR response carrying the exception name is sent. */
    @Test
    public void test_received_request_twoway_error_reply() throws RemotingException {
        final Person requestdata = new Person("charles");
        final Request request = new Request();
        request.setTwoWay(true);
        request.setData(requestdata);

        final AtomicInteger count = new AtomicInteger(0);
        final Channel mchannel = new MockedChannel() {
            @Override
            public void send(Object message) throws RemotingException {
                Response res = (Response) message;
                Assert.assertEquals(request.getId(), res.getId());
                Assert.assertEquals(request.getVersion(), res.getVersion());
                Assert.assertEquals(Response.SERVICE_ERROR, res.getStatus());
                Assert.assertNull(res.getResult());
                Assert.assertTrue(res.getErrorMessage().contains(BizException.class.getName()));
                count.incrementAndGet();
            }
        };
        ExchangeHandler exhandler = new MockedExchangeHandler() {
            @Override
            public Object reply(ExchangeChannel channel, Object request) throws RemotingException {
                throw new BizException();
            }
        };
        HeaderExchangeHandler hexhandler = new HeaderExchangeHandler(exhandler);
        hexhandler.received(mchannel, request);
        Assert.assertEquals(1, count.get());
    }

    /** A request marked broken yields a BAD_REQUEST response naming the carried exception. */
    @Test
    public void test_received_request_twoway_error_reqeustBroken() throws RemotingException {
        final Request request = new Request();
        request.setTwoWay(true);
        request.setData(new BizException());
        request.setBroken(true);

        final AtomicInteger count = new AtomicInteger(0);
        final Channel mchannel = new MockedChannel() {
            @Override
            public void send(Object message) throws RemotingException {
                Response res = (Response) message;
                Assert.assertEquals(request.getId(), res.getId());
                Assert.assertEquals(request.getVersion(), res.getVersion());
                Assert.assertEquals(Response.BAD_REQUEST, res.getStatus());
                Assert.assertNull(res.getResult());
                Assert.assertTrue(res.getErrorMessage().contains(BizException.class.getName()));
                count.incrementAndGet();
            }
        };
        HeaderExchangeHandler hexhandler = new HeaderExchangeHandler(new MockedExchangeHandler());
        hexhandler.received(mchannel, request);
        Assert.assertEquals(1, count.get());
    }

    /** A READONLY event must flag the channel read-only via its attribute. */
    @Test
    public void test_received_request_event_readonly() throws RemotingException {
        final Request request = new Request();
        request.setTwoWay(true);
        request.setEvent(Request.READONLY_EVENT);

        final Channel mchannel = new MockedChannel();
        HeaderExchangeHandler hexhandler = new HeaderExchangeHandler(new MockedExchangeHandler());
        hexhandler.received(mchannel, request);
        Assert.assertTrue(mchannel.hasAttribute(Constants.CHANNEL_ATTRIBUTE_READONLY_KEY));
    }

    /** Unknown events are silently discarded: nothing is sent, replied or delivered. */
    @Test
    public void test_received_request_event_other_discard() throws RemotingException {
        final Request request = new Request();
        request.setTwoWay(true);
        request.setEvent("my event");

        final Channel mchannel = new MockedChannel() {
            @Override
            public void send(Object message) throws RemotingException {
                Assert.fail();
            }
        };
        HeaderExchangeHandler hexhandler = new HeaderExchangeHandler(new MockedExchangeHandler() {
            @Override
            public Object reply(ExchangeChannel channel, Object request) throws RemotingException {
                Assert.fail();
                throw new RemotingException(channel, "");
            }

            @Override
            public void received(Channel channel, Object message) throws RemotingException {
                Assert.fail();
                throw new RemotingException(channel, "");
            }
        });
        hexhandler.received(mchannel, request);
    }

    // Static nested (no enclosing-instance reference needed) marker exception
    // thrown by mocked reply() implementations.
    private static class BizException extends RuntimeException {
        private static final long serialVersionUID = 1L;
    }

    // Base mock: every exchange operation fails unless a test overrides it.
    private static class MockedExchangeHandler extends MockedChannelHandler implements ExchangeHandler {

        @Override
        public String telnet(Channel channel, String message) throws RemotingException {
            throw new UnsupportedOperationException();
        }

        @Override
        public Object reply(ExchangeChannel channel, Object request) throws RemotingException {
            throw new UnsupportedOperationException();
        }
    }

    // Simple request payload used to round-trip data through the handler.
    private static class Person {
        private String name;

        public Person(String name) {
            super();
            this.name = name;
        }

        @Override
        public String toString() {
            return "Person [name=" + name + "]";
        }
    }
}
/* * $Id$ */ /* Copyright (c) 2000-2016 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. 
*/ package org.lockss.servlet; import javax.servlet.*; import java.io.*; import java.util.*; import java.net.*; import java.util.regex.Pattern; import org.apache.commons.lang3.ObjectUtils; import org.mortbay.http.*; import org.mortbay.html.*; import org.lockss.daemon.status.*; import org.lockss.util.*; import org.lockss.plugin.*; import org.lockss.state.*; /** ViewContent servlet displays cached content */ public class ViewContent extends LockssServlet { static final Logger log = Logger.getLogger("ViewContent"); private String action; private String verbose; private String auid; private String url; private ArchivalUnit au; private CachedUrl cu; private long clen; private String ctype; private CIProperties props; private PrintWriter wrtr = null; private String encapsulate; private PluginManager pluginMgr; private AdminServletManager srvltMgr; // don't hold onto objects after request finished protected void resetLocals() { wrtr = null; au = null; cu = null; url = null; auid = null; props = null; super.resetLocals(); } public void init(ServletConfig config) throws ServletException { super.init(config); pluginMgr = getLockssDaemon().getPluginManager(); try { srvltMgr = (AdminServletManager)getServletManager(); } catch (RuntimeException e) { log.warning("Can't find LocalServletManager", e); } } /** * Handle a request * @throws IOException */ public void lockssHandleRequest() throws IOException { if (!pluginMgr.areAusStarted()) { displayNotStarted(); return; } verbose = getParameter("verbose"); auid = getParameter("auid"); url = getParameter("url"); if (StringUtil.isNullString(url)) { displayForm(); return; } au = pluginMgr.getAuFromId(auid); if (au == null) { displayNotFound("No such AU: " + auid); return; } cu = au.makeCachedUrl(url); if (cu == null) { displayNotFound("URL " + url + " not found in AU: " + au.getName()); return; } boolean hasIncludedContent = cu.hasContent(); cu.setOption(CachedUrl.OPTION_INCLUDED_ONLY, "false"); String versionStr = 
getParameter("version"); if (versionStr != null) { try { int version = Integer.parseInt(versionStr); int curVer = cu.getVersion(); if (version != curVer) { CachedUrl verCu = cu.getCuVersion(version); verCu.setOption(CachedUrl.OPTION_INCLUDED_ONLY, "false"); if (verCu != null && verCu.hasContent()) { cu = verCu; } else { errMsg = "Couldn't find version " + versionStr + ", displaying current version"; } } } catch (NumberFormatException e) { log.error("Couldn't parse version string: " + versionStr); errMsg = "Illegal version: " + versionStr + ", displaying current version"; } catch (RuntimeException e) { log.error("Couldn't get file version", e); errMsg = "Couldn't find version " + versionStr + ", displaying current version"; } } if (!cu.hasContent()) { if (versionStr != null) { displayNotFound("Version " + versionStr + " of URL " + url + " has no content in AU: " + au.getName()); } else { displayNotFound("URL " + url + " not found in AU: " + au.getName()); } return; } clen = cu.getContentSize(); try { props = cu.getProperties(); ctype = cu.getContentType(); String frame = getParameter("frame"); if (StringUtil.isNullString(frame)) { if (isFrameType(ctype)) { displayFrameSet(); } else { displaySummary(false, hasIncludedContent); } } else if ("content".equalsIgnoreCase(frame)) { displayContent(); } else if ("summary".equalsIgnoreCase(frame)) { setFramed(true); displaySummary(true, hasIncludedContent); } else { displayError(HttpResponse.__400_Bad_Request, "Illegal frame parameter: " + frame); } } finally { cu.release(); } } boolean isFrameType(String ctype) { if (StringUtil.isNullString(ctype)) return false; String mimeType = HeaderUtil.getMimeTypeFromContentType(ctype); if (StringUtil.isNullString(mimeType)) return false; for (Iterator iter = srvltMgr.inFrameContentTypes().iterator(); iter.hasNext(); ) { if (StringUtil.startsWithIgnoreCase(mimeType, (String)iter.next())) { return true; } } return false; } void displayFrameSet() throws IOException { FrameSet set = new 
FrameSet(getPageTitle(),"*","*,*");
    addBarePageHeading(set);
    Properties args = getParamsAsProps();
    // Top frame: the CU metadata summary; bottom frame: the raw content.
    args.setProperty("frame", "summary");
    set.frame(0,0).name("CuMeta", srvURL(myServletDescr(), args));
    args.setProperty("frame", "content");
    set.frame(0,1).name("CuContent", srvURL(myServletDescr(), args));
    set.write(resp.getWriter());
  }

  /**
   * Render the metadata summary page for the current CachedUrl: AU name,
   * URL, content type, length, version links, fetch/repair dates, and
   * (with "showall") the raw header properties.
   * @param contentInOtherFrame true when rendering inside the frameset, so
   *   links target the other frame instead of replacing the page
   * @param hasIncludedContent false when the file is excluded by crawl rules
   */
  void displaySummary(boolean contentInOtherFrame, boolean hasIncludedContent)
      throws IOException {
    Page page = newPage();
    layoutErrorBlock(page);
    Table tbl = new Table(0, "ALIGN=CENTER CELLSPACING=2 CELLPADDING=0");
    tbl.newRow();
    tbl.newCell("align=left");
    tbl.add("AU:");
    tbl.newCell("align=left");
    tbl.add(getAuLink(au));
    tbl.newRow();
    tbl.newCell("align=left");
    tbl.add("URL:&nbsp;");
    tbl.newCell("align=left");
    tbl.add(url);
    if (!hasIncludedContent) {
      tbl.newRow();
      tbl.newCell("colspan=2 align=left");
      tbl.add("<b>Excluded by crawl rules - hidden from normal processing</b>");
    }
    page.add("<font size=+1>");
    page.add(tbl);
    page.add("</font>");
    tbl = new Table(0, "ALIGN=CENTER CELLSPACING=2 CELLPADDING=0");
    // tbl.newRow();
    // tbl.newCell("colspan=2 align=center");
    // Show the plugin-inferred type, footnoted when it differs from the
    // Content-Type header actually collected.
    String contentTypeHeader =
      props.getProperty(CachedUrl.PROPERTY_CONTENT_TYPE);
    String contentType = cu.getContentType();
    if (StringUtil.equalStrings(contentType, contentTypeHeader)) {
      addPropRow(tbl, "Content Type", contentType);
    } else {
      addPropRow(tbl, "Content Type" + addFootnote("Inferred by plugin"),
                 contentType);
    }
    addPropRow(tbl, "Length", clen);
    try {
      String versionStr = Integer.toString(cu.getVersion());
      // getCuVersions(2) fetches at most two versions: enough to know
      // whether an "Other versions" link is warranted.
      CachedUrl[] cuVersions = cu.getCuVersions(2);
      if (cuVersions.length > 1) {
        // If multiple versions, include link to version table
        Properties args = PropUtil.fromArgs("table",
                                            ArchivalUnitStatus.FILE_VERSIONS_TABLE_NAME,
                                            "key", au.getAuId());
        args.setProperty("url", url);
        StringBuilder sb = new StringBuilder(versionStr);
        sb.append("&nbsp;&nbsp;");
        sb.append(srvLink(AdminServletManager.SERVLET_DAEMON_STATUS,
                          "Other versions", args));
        versionStr = sb.toString();
      }
      addPropRow(tbl, "Version #", versionStr);
    } catch (RuntimeException e) {
      log.warning("Can't get cu version: " + cu.getUrl(), e);
    }
    try {
      long sdate =
        Long.parseLong(props.getProperty(CachedUrl.PROPERTY_FETCH_TIME));
      addPropRow(tbl, "Collected at",
                 ServletUtil.headerDf.format(new Date(sdate)));
    } catch (NumberFormatException ignore) {
      // missing/unparseable fetch time: just omit the row
    }
    String repairFrom = props.getProperty(CachedUrl.PROPERTY_REPAIR_FROM);
    if (!StringUtil.isNullString(repairFrom)) {
      addPropRow(tbl, "Repaired from", repairFrom);
      try {
        long rdate =
          Long.parseLong(props.getProperty(CachedUrl.PROPERTY_REPAIR_DATE));
        addPropRow(tbl, "Repair date",
                   ServletUtil.headerDf.format(new Date(rdate)));
      } catch (NumberFormatException ignore) {
      }
    }
    if (!StringUtil.isNullString(getParameter("showall"))) {
      tbl.newRow();
      tbl.newRow();
      tbl.newCell("align=left");
      // NOTE(review): the "<b>" is never closed; left as-is (doc-only pass).
      tbl.add("<b>Raw Headers");
      // NOTE(review): raw TreeSet (unchecked); left as-is (doc-only pass).
      Set<String> keys = new TreeSet(props.keySet());
      for (String key : keys) {
        addPropRow(tbl, key, props.getProperty(key));
      }
    } else {
      // Offer a "Show all" link that re-renders this summary with the raw
      // headers included, targeting the summary frame when framed.
      Properties args = getParamsAsProps();
      args.remove("frame");
      args.setProperty("showall", "1");
      if (contentInOtherFrame) {
        args.setProperty("frame", "summary");
      }
      tbl.newRow();
      tbl.newCell("align=left");
      Link lnk = new Link(srvURL(myServletDescr(), args), "Show all");
      if (contentInOtherFrame) {
        lnk.attribute("target", "CuMeta");
      }
      tbl.add(lnk);
    }
    // Fresh local CU (shadows the field deliberately) used only to decide
    // whether an "Extract URLs" link applies; always released.
    CachedUrl cu = au.makeCachedUrl(url);
    try {
      if (cu.hasContent()) {
        if (au.getLinkExtractor(cu.getContentType()) != null) {
          tbl.newRow();
          tbl.newCell("align=left");
          Link extrlnk =
            new Link(srvURL(AdminServletManager.SERVLET_LIST_OBJECTS,
                            PropUtil.fromArgs("type", "extracturls",
                                              "auid", au.getAuId(),
                                              "url", url)),
                     "Extract URLs");
          tbl.add(extrlnk);
        }
      }
    } finally {
      AuUtil.safeRelease(cu);
    }
    page.add(tbl);
    page.add("<br>");
    Composite comp = new Block(Block.Center);
    if (contentInOtherFrame) {
      comp.add("Page is displayed below. Most intra-site links will not work.");
    } else {
      Properties args = getParamsAsProps();
      args.setProperty("frame", "content");
      comp.add(srvLink(myServletDescr(),
                       "Click here to download/play content", args));
    }
    page.add(comp);
    // page.add(getFooter());
    endPageNoFooter(page);
  }

  // Convenience overload: numeric property row.
  void addPropRow(Table tbl, String prop, long val) {
    addPropRow(tbl, prop, Long.toString(val));
  }

  // Add one "name: value" row to the summary table.
  void addPropRow(Table tbl, String prop, String val) {
    tbl.newRow();
    tbl.newCell("align=left");
    tbl.add(prop);
    tbl.add(":&nbsp;");
    tbl.newCell("align=left");
    tbl.add(val);
  }

  /**
   * Stream the CachedUrl's content to the HTTP response.  With the "filter"
   * parameter set, streams the hash-filtered view (unknown length);
   * otherwise streams the uncompressed raw content with Content-Length set.
   */
  void displayContent() {
    if (log.isDebug3()) {
      log.debug3("props: " + props);
      log.debug3("ctype: " + ctype);
      log.debug3("clen: " + clen);
    }
    boolean isFilter = getParameter("filter") != null;
    resp.setContentType(ctype);
    // Set as inline content with name, if PDF or unframed content
    if (!isFrameType(ctype)) {
      String fname =
        ObjectUtils.defaultIfNull(ServletUtil.getContentOriginalFilename(cu, true),
                                  "UnnamedContent");
      // NOTE(review): filename is not quoted/escaped in the header; left
      // as-is (doc-only pass).
      resp.setHeader("Content-disposition", "inline; filename=" + fname);
    }
    // if filtering, don't know content length
    if (!isFilter) {
      if (clen <= Integer.MAX_VALUE) {
        resp.setContentLength((int)clen);
      } else {
        resp.setHeader(HttpFields.__ContentLength, Long.toString(clen));
      }
    }
    OutputStream out = null;
    InputStream in = null;
    try {
      out = resp.getOutputStream();
      if (isFilter) {
        in = cu.openForHashing();
      } else {
        in = cu.getUncompressedInputStream();
      }
      StreamUtil.copy(in, out);
    } catch (IOException e) {
      log.warning("Copying CU to HTTP stream", e);
    } finally {
      if (in != null) try {in.close();} catch (IOException ignore) {}
      if (out != null) try {out.close();} catch (IOException ignore) {}
    }
    // NOTE(review): release is outside the finally, so an Error/Runtime
    // failure above would skip it; left as-is (doc-only pass).
    cu.release();
  }

  void displayForm() throws IOException {
    displayForm(null);
  }

  // NOTE(review): intentionally(?) empty — the no-URL case renders nothing.
  void displayForm(String error) throws IOException {
  }

  void displayNotFound(String error) throws IOException {
    displayError(HttpResponse.__404_Not_Found, error);
  }

  /** Render a centered red error message page with the given HTTP status. */
  void displayError(int result, String error) throws IOException {
    Page page = newPage();
    Composite comp =
new Composite();
    comp.add("<center><font color=red size=+1>");
    comp.add(error);
    comp.add("</font></center><br>");
    page.add(comp);
    endPage(page);
  }

  /** Link to the daemon-status AU table for the given AU, labeled with its
   * name. */
  Link getAuLink(ArchivalUnit au) {
    return new Link(srvURL(AdminServletManager.SERVLET_DAEMON_STATUS,
                           PropUtil.fromArgs("table",
                                             ArchivalUnitStatus.AU_STATUS_TABLE_NAME,
                                             "key", au.getAuId())),
                    au.getName());
  }
}
package zornco.reploidcraft.core;

import net.minecraft.client.gui.GuiScreen;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.boss.EntityDragon;
import net.minecraft.entity.boss.EntityWither;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.monster.EntityBlaze;
import net.minecraft.entity.monster.EntityCreeper;
import net.minecraft.entity.monster.EntityEnderman;
import net.minecraft.entity.monster.EntityGhast;
import net.minecraft.entity.monster.EntityMagmaCube;
import net.minecraft.entity.monster.EntityPigZombie;
import net.minecraft.entity.monster.EntitySkeleton;
import net.minecraft.entity.monster.EntitySlime;
import net.minecraft.entity.monster.EntitySpider;
import net.minecraft.entity.monster.EntityZombie;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.item.ItemStack;
import net.minecraftforge.client.event.MouseEvent;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.entity.living.LivingDropsEvent;
import net.minecraftforge.event.entity.player.AttackEntityEvent;
import net.minecraftforge.event.entity.player.EntityItemPickupEvent;
import zornco.reploidcraft.ReploidCraft;
import zornco.reploidcraft.items.IKeyBound;
import zornco.reploidcraft.items.ItemHPEnergy;
import zornco.reploidcraft.items.ItemTank;
import zornco.reploidcraft.sounds.Sounds;
import cpw.mods.fml.client.FMLClientHandler;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;

/**
 * Name and cast of this class are irrelevant
 */
public class EventBus {

    // Self-registers on the Forge event bus; @SubscribeEvent methods below
    // are then dispatched automatically.
    public EventBus() {
        MinecraftForge.EVENT_BUS.register(this);
    }

    /**
     * Client-side: shift + mouse wheel while holding an IKeyBound item
     * sends "up"/"down" actions to the item and cancels the scroll.
     */
    @SideOnly(Side.CLIENT)
    @SubscribeEvent
    public void onMouseEvent(MouseEvent event) {
        // Only react when no GUI is open (player is in-world).
        if(FMLClientHandler.instance().getClient().currentScreen == null) {
            ItemStack buster = FMLClientHandler.instance().getClient().thePlayer.getHeldItem();
            if(buster != null && buster.getItem() != null && buster.getItem()
instanceof IKeyBound) { if(GuiScreen.isShiftKeyDown()) { int k = event.dwheel; if(k != 0) { if(k > 0) { ((IKeyBound)buster.getItem()).doKeyBindingAction(FMLClientHandler.instance().getClient().thePlayer, buster, "up"); } else { ((IKeyBound)buster.getItem()).doKeyBindingAction(FMLClientHandler.instance().getClient().thePlayer, buster, "down"); } event.setCanceled(true); } } } } } public void onEntityDrop(LivingDropsEvent event) { EntityLivingBase victim = event.entityLiving; if(ReploidCraft.rand.nextInt(64) == 0) { System.out.println("DING"); if (victim instanceof EntityBlaze) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 1))); } if (victim instanceof EntityCreeper) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 3))); } if (victim instanceof EntityEnderman) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 4))); } if (victim instanceof EntityDragon) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 5))); } if (victim instanceof EntityGhast) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 6))); } if (victim instanceof EntityMagmaCube) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 7))); } if (victim instanceof EntityPigZombie) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 8))); } if (victim instanceof EntitySkeleton) { if(((EntitySkeleton)victim).getSkeletonType() == 1) event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, 
new ItemStack(ReploidCraft.upgradeChip, 1, 13))); else event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 9))); } if (victim instanceof EntitySlime) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 10))); } if (victim instanceof EntitySpider) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 11))); } if (victim instanceof EntityWither) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 12))); } if (victim instanceof EntityZombie) { event.drops.add(new EntityItem(victim.worldObj, victim.posX, victim.posY+0.2, victim.posZ, new ItemStack(ReploidCraft.upgradeChip, 1, 14))); } } } @SubscribeEvent public void onEntityAttack(AttackEntityEvent event) { //if(event.target != null && event.entityLiving != null){ //System.out.println(event.target != null ? event.target.getClass().toString():"null"); //if(event.target instanceof EntityDragonPart) System.out.println(((EntityDragonPart)event.target).field_146032_b); //System.out.println(event.entityLiving != null ? event.entityLiving.getClass().toString():"null"); //} } /** * The key is the @ForgeSubscribe annotation and the cast of the Event you put in as argument. * The method name you pick does not matter. Method signature is public void, always. 
*/
    @SubscribeEvent
    public void entityPickup(EntityItemPickupEvent event) {
        /*
         * You can then proceed to read and change the Event's fields where possible
         */
        if(event.entityLiving == null || event.item == null)
            return;
        EntityLivingBase playerEnt = event.entityLiving;
        EntityItem item = event.item;
        /*
         * Note this possibility to interrupt certain (not all) events
         */
        /*if (event.isCancelable()) {
            event.setCanceled(true);
        }*/
        // Only HP pickups are handled; weapon pickups are commented out.
        if(item.getEntityItem().getItem() == ReploidCraft.healthBit
                || item.getEntityItem().getItem() == ReploidCraft.healthByte
                //|| item.getEntityItem().getItem() == ReploidCraft.weaponBit
                //|| item.getEntityItem().getItem() == ReploidCraft.weaponByte
                ) {
            // Sneaking lets the player pick the item up normally instead of
            // auto-consuming it.
            if(event.entityLiving instanceof EntityPlayerMP && !playerEnt.isSneaking()) {
                ItemHPEnergy bit = (ItemHPEnergy) item.getEntityItem()
                        .getItem();
                // type 0 = bit, type 1 = byte; play the matching pickup sound.
                switch (bit.type) {
                case 0:
                    playerEnt.worldObj.playSoundAtEntity(playerEnt, Sounds.BIT, 1.0F, 1.0F);
                    break;
                case 1:
                    playerEnt.worldObj.playSoundAtEntity(playerEnt, Sounds.BYTE, 1.0F, 1.0F);
                    break;
                }
                // At full health the energy is banked into an HP tank;
                // otherwise it heals the player directly.
                if (event.entityLiving.getHealth() == event.entityLiving.getMaxHealth()) {
                    processBit((EntityPlayerMP)event.entityLiving, item);
                } else {
                    bit.applyEffect(playerEnt, item.getEntityItem().stackSize);
                }
                // Consume the dropped item either way.
                item.getEntityItem().stackSize = 0;
                item.setDead();
                return;
            }
        }
        /*
         * Events may offer further fields and methods. Just read them, it should be obvious.
         */
    }

    /**
     * Banks a picked-up HP bit/byte into the first suitable HP tank in the
     * player's main inventory (slots 0-35).  An untyped (empty) tank is
     * claimed for "HP" first; otherwise the first non-full HP tank is
     * filled.  Tank fill level is stored as item damage, where lower damage
     * appears to mean fuller — TODO confirm against ItemTank.
     */
    public void processBit(EntityPlayerMP player, EntityItem item) {
        for (int i = 0; i < 36; i++) {
            ItemStack is = player.inventory.getStackInSlot(i);
            if (is == null) {
                continue;
            }
            if (is.getItem().equals(ReploidCraft.healthTank)) {
                if (ItemTank.getType(is).isEmpty()) {
                    // Claim an untyped tank for HP and deposit into it.
                    ItemTank.setType(is, "HP");
                    is.setItemDamage(is.getItemDamage() <= 0 ? 0 : is.getItemDamage() - item.getEntityItem().stackSize*bitSize(item));
                    break;
                }
                if ((!ItemTank.getType(is).equals("HP")))
                    continue;
                if (is.getItemDamage() == 0)
                    continue;
                is.setItemDamage(is.getItemDamage() <= 0 ?
                        0 : is.getItemDamage() - item.getEntityItem().stackSize*bitSize(item));
                break;
            }
        }
    }

    /** Energy value per item: 3 for a health bit, 6 for a health byte,
     * 0 otherwise. */
    private int bitSize(EntityItem item) {
        if(item.getEntityItem().getItem() == ReploidCraft.healthBit)// || item.getEntityItem().getItem() == ReploidCraft.weaponBit )
            return 3;
        else if(item.getEntityItem().getItem() == ReploidCraft.healthByte)// || item.getEntityItem().getItem() == ReploidCraft.weaponByte )
            return 6;
        else return 0;
    }
}
/* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.android.sunshine.app.data; import android.content.ComponentName; import android.content.ContentUris; import android.content.ContentValues; import android.content.pm.PackageManager; import android.content.pm.ProviderInfo; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.net.Uri; import android.os.Build; import android.test.AndroidTestCase; import android.util.Log; import com.example.android.sunshine.app.data.WeatherContract.LocationEntry; import com.example.android.sunshine.app.data.WeatherContract.WeatherEntry; /* Note: This is not a complete set of tests of the Sunshine ContentProvider, but it does test that at least the basic functionality has been implemented correctly. Students: Uncomment the tests in this class as you implement the functionality in your ContentProvider to make sure that you've implemented things reasonably correctly. 
*/
public class TestProvider extends AndroidTestCase {

    public static final String LOG_TAG = TestProvider.class.getSimpleName();

    // Number of rows created by createBulkInsertWeatherValues().
    static private final int BULK_INSERT_RECORDS_TO_INSERT = 10;

    /**
     * Builds BULK_INSERT_RECORDS_TO_INSERT weather rows for the given
     * location, one per consecutive day starting at TestUtilities.TEST_DATE,
     * with slightly varying humidity/pressure/temps so rows are
     * distinguishable.
     */
    static ContentValues[] createBulkInsertWeatherValues(long locationRowId) {
        long currentTestDate = TestUtilities.TEST_DATE;
        long millisecondsInADay = 1000 * 60 * 60 * 24;
        ContentValues[] returnContentValues = new ContentValues[BULK_INSERT_RECORDS_TO_INSERT];

        for (int i = 0; i < BULK_INSERT_RECORDS_TO_INSERT; i++, currentTestDate += millisecondsInADay) {
            ContentValues weatherValues = new ContentValues();
            weatherValues.put(WeatherEntry.COLUMN_LOC_KEY, locationRowId);
            weatherValues.put(WeatherEntry.COLUMN_DATE, currentTestDate);
            weatherValues.put(WeatherEntry.COLUMN_DEGREES, 1.1);
            weatherValues.put(WeatherEntry.COLUMN_HUMIDITY, 1.2 + 0.01 * (float) i);
            weatherValues.put(WeatherEntry.COLUMN_PRESSURE, 1.3 - 0.01 * (float) i);
            weatherValues.put(WeatherEntry.COLUMN_MAX_TEMP, 75 + i);
            weatherValues.put(WeatherEntry.COLUMN_MIN_TEMP, 65 - i);
            weatherValues.put(WeatherEntry.COLUMN_SHORT_DESC, "Asteroids");
            weatherValues.put(WeatherEntry.COLUMN_WIND_SPEED, 5.5 + 0.2 * (float) i);
            weatherValues.put(WeatherEntry.COLUMN_WEATHER_ID, 321);
            returnContentValues[i] = weatherValues;
        }
        return returnContentValues;
    }

    /*
       This helper function deletes all records from both database tables using the ContentProvider.
       It also queries the ContentProvider to make sure that the database has been successfully
       deleted, so it cannot be used until the Query and Delete functions have been written
       in the ContentProvider.

       Students: Replace the calls to deleteAllRecordsFromDB with this one after you have written
       the delete functionality in the ContentProvider.
*/
    // Deletes every row from both tables via the provider, then verifies
    // both tables are empty.
    public void deleteAllRecordsFromProvider() {
        mContext.getContentResolver().delete(
                WeatherEntry.CONTENT_URI,
                null,
                null
        );
        mContext.getContentResolver().delete(
                LocationEntry.CONTENT_URI,
                null,
                null
        );

        Cursor cursor = mContext.getContentResolver().query(
                WeatherEntry.CONTENT_URI,
                null,
                null,
                null,
                null
        );
        assertEquals("Error: Records not deleted from Weather table during delete", 0, cursor.getCount());
        cursor.close();

        cursor = mContext.getContentResolver().query(
                LocationEntry.CONTENT_URI,
                null,
                null,
                null,
                null
        );
        assertEquals("Error: Records not deleted from Location table during delete", 0, cursor.getCount());
        cursor.close();
    }

    /*
        Student: Refactor this function to use the deleteAllRecordsFromProvider functionality once
        you have implemented delete functionality there.
     */
    public void deleteAllRecords() {
        deleteAllRecordsFromProvider();
    }

    // Since we want each test to start with a clean slate, run deleteAllRecords
    // in setUp (called by the test runner before each test).
    @Override
    protected void setUp() throws Exception {
        super.setUp();
        deleteAllRecords();
    }

    /*
        This test checks to make sure that the content provider is registered correctly.
        Students: Uncomment this test to make sure you've correctly registered the
        WeatherProvider.
     */
    public void testProviderRegistry() {
        PackageManager pm = mContext.getPackageManager();

        // We define the component name based on the package name from the context and the
        // WeatherProvider class.
        ComponentName componentName = new ComponentName(mContext.getPackageName(),
                WeatherProvider.class.getName());
        try {
            // Fetch the provider info using the component name from the PackageManager
            // This throws an exception if the provider isn't registered.
            ProviderInfo providerInfo = pm.getProviderInfo(componentName, 0);

            // Make sure that the registered authority matches the authority from the Contract.
assertEquals("Error: WeatherProvider registered with authority: " + providerInfo.authority +
                    " instead of authority: " + WeatherContract.CONTENT_AUTHORITY,
                    providerInfo.authority, WeatherContract.CONTENT_AUTHORITY);
        } catch (PackageManager.NameNotFoundException e) {
            // I guess the provider isn't registered correctly.
            assertTrue("Error: WeatherProvider not registered at " + mContext.getPackageName(),
                    false);
        }
    }

    /*
        This test doesn't touch the database.  It verifies that the ContentProvider returns
        the correct type for each type of URI that it can handle.
        Students: Uncomment this test to verify that your implementation of GetType is
        functioning correctly.
     */
    public void testGetType() {
        // content://com.example.android.sunshine.app/weather/
        String type = mContext.getContentResolver().getType(WeatherEntry.CONTENT_URI);
        // vnd.android.cursor.dir/com.example.android.sunshine.app/weather
        assertEquals("Error: the WeatherEntry CONTENT_URI should return WeatherEntry.CONTENT_TYPE",
                WeatherEntry.CONTENT_TYPE, type);

        String testLocation = "94074";
        // content://com.example.android.sunshine.app/weather/94074
        type = mContext.getContentResolver().getType(
                WeatherEntry.buildWeatherLocation(testLocation));
        // vnd.android.cursor.dir/com.example.android.sunshine.app/weather
        assertEquals("Error: the WeatherEntry CONTENT_URI with location should return WeatherEntry.CONTENT_TYPE",
                WeatherEntry.CONTENT_TYPE, type);

        long testDate = 1419120000L; // December 21st, 2014
        // content://com.example.android.sunshine.app/weather/94074/20140612
        type = mContext.getContentResolver().getType(
                WeatherEntry.buildWeatherLocationWithDate(testLocation, testDate));
        // vnd.android.cursor.item/com.example.android.sunshine.app/weather/1419120000
        assertEquals("Error: the WeatherEntry CONTENT_URI with location and date should return WeatherEntry.CONTENT_ITEM_TYPE",
                WeatherEntry.CONTENT_ITEM_TYPE, type);

        // content://com.example.android.sunshine.app/location/
        type =
mContext.getContentResolver().getType(LocationEntry.CONTENT_URI);
        // vnd.android.cursor.dir/com.example.android.sunshine.app/location
        assertEquals("Error: the LocationEntry CONTENT_URI should return LocationEntry.CONTENT_TYPE",
                LocationEntry.CONTENT_TYPE, type);
    }

    /*
        This test uses the database directly to insert and then uses the ContentProvider to
        read out the data.  Uncomment this test to see if the basic weather query functionality
        given in the ContentProvider is working correctly.
     */
    public void testBasicWeatherQuery() {
        // insert our test records into the database
        WeatherDbHelper dbHelper = new WeatherDbHelper(mContext);
        SQLiteDatabase db = dbHelper.getWritableDatabase();

        // NOTE(review): testValues is unused here; the location insert goes
        // through TestUtilities.  Left as-is (doc-only pass).
        ContentValues testValues = TestUtilities.createNorthPoleLocationValues();
        long locationRowId = TestUtilities.insertNorthPoleLocationValues(mContext);

        // Fantastic.  Now that we have a location, add some weather!
        ContentValues weatherValues = TestUtilities.createWeatherValues(locationRowId);

        long weatherRowId = db.insert(WeatherEntry.TABLE_NAME, null, weatherValues);
        assertTrue("Unable to Insert WeatherEntry into the Database", weatherRowId != -1);

        db.close();

        // Test the basic content provider query
        Cursor weatherCursor = mContext.getContentResolver().query(
                WeatherEntry.CONTENT_URI,
                null,
                null,
                null,
                null
        );

        // Make sure we get the correct cursor out of the database
        // NOTE(review): weatherCursor is never closed; left as-is (doc-only
        // pass).
        TestUtilities.validateCursor("testBasicWeatherQuery", weatherCursor, weatherValues);
    }

    /*
        This test uses the database directly to insert and then uses the ContentProvider to
        read out the data.  Uncomment this test to see if your location queries are
        performing correctly.
*/
    public void testBasicLocationQueries() {
        // insert our test records into the database
        WeatherDbHelper dbHelper = new WeatherDbHelper(mContext);
        SQLiteDatabase db = dbHelper.getWritableDatabase();

        ContentValues testValues = TestUtilities.createNorthPoleLocationValues();
        long locationRowId = TestUtilities.insertNorthPoleLocationValues(mContext);

        // Test the basic content provider query
        Cursor locationCursor = mContext.getContentResolver().query(
                LocationEntry.CONTENT_URI,
                null,
                null,
                null,
                null
        );

        // Make sure we get the correct cursor out of the database
        TestUtilities.validateCursor("testBasicLocationQueries, location query", locationCursor, testValues);

        // Has the NotificationUri been set correctly? --- we can only test this easily against API
        // level 19 or greater because getNotificationUri was added in API level 19.
        if (Build.VERSION.SDK_INT >= 19) {
            assertEquals("Error: Location Query did not properly set NotificationUri",
                    locationCursor.getNotificationUri(), LocationEntry.CONTENT_URI);
        }
    }

    /*
        This test uses the provider to insert and then update the data. Uncomment this test to
        see if your update location is functioning correctly.
     */
    public void testUpdateLocation() {
        // Create a new map of values, where column names are the keys
        ContentValues values = TestUtilities.createNorthPoleLocationValues();

        Uri locationUri = mContext.getContentResolver().
                insert(LocationEntry.CONTENT_URI, values);
        long locationRowId = ContentUris.parseId(locationUri);

        // Verify we got a row back.
assertTrue(locationRowId != -1);
        Log.d(LOG_TAG, "New row id: " + locationRowId);

        // Copy the inserted values and change the city name for the update.
        ContentValues updatedValues = new ContentValues(values);
        updatedValues.put(LocationEntry._ID, locationRowId);
        updatedValues.put(LocationEntry.COLUMN_CITY_NAME, "Santa's Village");

        // Create a cursor with observer to make sure that the content provider is notifying
        // the observers as expected
        Cursor locationCursor = mContext.getContentResolver().query(LocationEntry.CONTENT_URI, null, null, null, null);

        TestUtilities.TestContentObserver tco = TestUtilities.getTestContentObserver();
        locationCursor.registerContentObserver(tco);

        int count = mContext.getContentResolver().update(
                LocationEntry.CONTENT_URI, updatedValues, LocationEntry._ID + "= ?",
                new String[]{Long.toString(locationRowId)});
        assertEquals(count, 1);

        // Test to make sure our observer is called.  If not, we throw an assertion.
        //
        // Students: If your code is failing here, it means that your content provider
        // isn't calling getContext().getContentResolver().notifyChange(uri, null);
        tco.waitForNotificationOrFail();

        locationCursor.unregisterContentObserver(tco);
        locationCursor.close();

        // A cursor is your primary interface to the query results.
        Cursor cursor = mContext.getContentResolver().query(
                LocationEntry.CONTENT_URI,
                null,   // projection
                LocationEntry._ID + " = " + locationRowId,
                null,   // Values for the "where" clause
                null    // sort order
        );

        TestUtilities.validateCursor("testUpdateLocation.  Error validating location entry update.",
                cursor, updatedValues);

        cursor.close();
    }

    // Make sure we can still delete after adding/updating stuff
    //
    // Student: Uncomment this test after you have completed writing the insert functionality
    // in your provider.  It relies on insertions with testInsertReadProvider, so insert and
    // query functionality must also be complete before this test can be used.
public void testInsertReadProvider() { ContentValues testValues = TestUtilities.createNorthPoleLocationValues(); // Register a content observer for our insert. This time, directly with the content resolver TestUtilities.TestContentObserver tco = TestUtilities.getTestContentObserver(); mContext.getContentResolver().registerContentObserver(LocationEntry.CONTENT_URI, true, tco); Uri locationUri = mContext.getContentResolver().insert(LocationEntry.CONTENT_URI, testValues); // Did our content observer get called? Students: If this fails, your insert location // isn't calling getContext().getContentResolver().notifyChange(uri, null); tco.waitForNotificationOrFail(); mContext.getContentResolver().unregisterContentObserver(tco); long locationRowId = ContentUris.parseId(locationUri); // Verify we got a row back. assertTrue(locationRowId != -1); // Data's inserted. IN THEORY. Now pull some out to stare at it and verify it made // the round trip. // A cursor is your primary interface to the query results. Cursor cursor = mContext.getContentResolver().query( LocationEntry.CONTENT_URI, null, // leaving "columns" null just returns all the columns. null, // cols for "where" clause null, // values for "where" clause null // sort order ); TestUtilities.validateCursor("testInsertReadProvider. Error validating LocationEntry.", cursor, testValues); // Fantastic. Now that we have a location, add some weather! ContentValues weatherValues = TestUtilities.createWeatherValues(locationRowId); // The TestContentObserver is a one-shot class tco = TestUtilities.getTestContentObserver(); mContext.getContentResolver().registerContentObserver(WeatherEntry.CONTENT_URI, true, tco); Uri weatherInsertUri = mContext.getContentResolver() .insert(WeatherEntry.CONTENT_URI, weatherValues); assertTrue(weatherInsertUri != null); // Did our content observer get called? 
Students: If this fails, your insert weather // in your ContentProvider isn't calling // getContext().getContentResolver().notifyChange(uri, null); tco.waitForNotificationOrFail(); mContext.getContentResolver().unregisterContentObserver(tco); // A cursor is your primary interface to the query results. Cursor weatherCursor = mContext.getContentResolver().query( WeatherEntry.CONTENT_URI, // Table to Query null, // leaving "columns" null just returns all the columns. null, // cols for "where" clause null, // values for "where" clause null // columns to group by ); TestUtilities.validateCursor("testInsertReadProvider. Error validating WeatherEntry insert.", weatherCursor, weatherValues); // Add the location values in with the weather data so that we can make // sure that the join worked and we actually get all the values back weatherValues.putAll(testValues); // Get the joined Weather and Location data weatherCursor = mContext.getContentResolver().query( WeatherEntry.buildWeatherLocation(TestUtilities.TEST_LOCATION), null, // leaving "columns" null just returns all the columns. null, // cols for "where" clause null, // values for "where" clause null // sort order ); TestUtilities.validateCursor("testInsertReadProvider. Error validating joined Weather and Location Data.", weatherCursor, weatherValues); // Get the joined Weather and Location data with a start date weatherCursor = mContext.getContentResolver().query( WeatherEntry.buildWeatherLocationWithStartDate( TestUtilities.TEST_LOCATION, TestUtilities.TEST_DATE), null, // leaving "columns" null just returns all the columns. null, // cols for "where" clause null, // values for "where" clause null // sort order ); TestUtilities.validateCursor("testInsertReadProvider. 
Error validating joined Weather and Location Data with start date.", weatherCursor, weatherValues); // Get the joined Weather data for a specific date weatherCursor = mContext.getContentResolver().query( WeatherEntry.buildWeatherLocationWithDate(TestUtilities.TEST_LOCATION, TestUtilities.TEST_DATE), null, null, null, null ); TestUtilities.validateCursor("testInsertReadProvider. Error validating joined Weather and Location data for a specific date.", weatherCursor, weatherValues); } // Make sure we can still delete after adding/updating stuff // // Student: Uncomment this test after you have completed writing the delete functionality // in your provider. It relies on insertions with testInsertReadProvider, so insert and // query functionality must also be complete before this test can be used. public void testDeleteRecords() { testInsertReadProvider(); // Register a content observer for our location delete. TestUtilities.TestContentObserver locationObserver = TestUtilities.getTestContentObserver(); mContext.getContentResolver().registerContentObserver(LocationEntry.CONTENT_URI, true, locationObserver); // Register a content observer for our weather delete. TestUtilities.TestContentObserver weatherObserver = TestUtilities.getTestContentObserver(); mContext.getContentResolver().registerContentObserver(WeatherEntry.CONTENT_URI, true, weatherObserver); deleteAllRecordsFromProvider(); // Students: If either of these fail, you most-likely are not calling the // getContext().getContentResolver().notifyChange(uri, null); in the ContentProvider // delete. (only if the insertReadProvider is succeeding) locationObserver.waitForNotificationOrFail(); weatherObserver.waitForNotificationOrFail(); mContext.getContentResolver().unregisterContentObserver(locationObserver); mContext.getContentResolver().unregisterContentObserver(weatherObserver); } // Student: Uncomment this test after you have completed writing the BulkInsert functionality // in your provider. 
Note that this test will work with the built-in (default) provider // implementation, which just inserts records one-at-a-time, so really do implement the // BulkInsert ContentProvider function. public void testBulkInsert() { // first, let's create a location value ContentValues testValues = TestUtilities.createNorthPoleLocationValues(); Uri locationUri = mContext.getContentResolver().insert(LocationEntry.CONTENT_URI, testValues); long locationRowId = ContentUris.parseId(locationUri); // Verify we got a row back. assertTrue(locationRowId != -1); // Data's inserted. IN THEORY. Now pull some out to stare at it and verify it made // the round trip. // A cursor is your primary interface to the query results. Cursor cursor = mContext.getContentResolver().query( LocationEntry.CONTENT_URI, null, // leaving "columns" null just returns all the columns. null, // cols for "where" clause null, // values for "where" clause null // sort order ); TestUtilities.validateCursor("testBulkInsert. Error validating LocationEntry.", cursor, testValues); // Now we can bulkInsert some weather. In fact, we only implement BulkInsert for weather // entries. With ContentProviders, you really only have to implement the features you // use, after all. ContentValues[] bulkInsertContentValues = createBulkInsertWeatherValues(locationRowId); // Register a content observer for our bulk insert. TestUtilities.TestContentObserver weatherObserver = TestUtilities.getTestContentObserver(); mContext.getContentResolver().registerContentObserver(WeatherEntry.CONTENT_URI, true, weatherObserver); int insertCount = mContext.getContentResolver().bulkInsert(WeatherEntry.CONTENT_URI, bulkInsertContentValues); // Students: If this fails, it means that you most-likely are not calling the // getContext().getContentResolver().notifyChange(uri, null); in your BulkInsert // ContentProvider method. 
weatherObserver.waitForNotificationOrFail(); mContext.getContentResolver().unregisterContentObserver(weatherObserver); assertEquals(insertCount, BULK_INSERT_RECORDS_TO_INSERT); // A cursor is your primary interface to the query results. cursor = mContext.getContentResolver().query( WeatherEntry.CONTENT_URI, null, // leaving "columns" null just returns all the columns. null, // cols for "where" clause null, // values for "where" clause WeatherEntry.COLUMN_DATE + " ASC" // sort order == by DATE ASCENDING ); // we should have as many records in the database as we've inserted assertEquals(cursor.getCount(), BULK_INSERT_RECORDS_TO_INSERT); // and let's make sure they match the ones we created cursor.moveToFirst(); for (int i = 0; i < BULK_INSERT_RECORDS_TO_INSERT; i++, cursor.moveToNext()) { TestUtilities.validateCurrentRecord("testBulkInsert. Error validating WeatherEntry " + i, cursor, bulkInsertContentValues[i]); } cursor.close(); } }
package edu.berkeley.thebes.hat.client;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import javax.naming.ConfigurationException;

import com.yammer.metrics.core.Histogram;
import edu.berkeley.thebes.common.thrift.ServerAddress;
import edu.berkeley.thebes.common.thrift.ThriftDataItem;
import edu.berkeley.thebes.hat.common.thrift.ReplicaService;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.transport.TTransportException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Meter;
import com.yammer.metrics.core.Timer;
import com.yammer.metrics.core.TimerContext;

import edu.berkeley.thebes.common.config.Config;
import edu.berkeley.thebes.common.config.ConfigParameterTypes.AtomicityLevel;
import edu.berkeley.thebes.common.config.ConfigParameterTypes.IsolationLevel;
import edu.berkeley.thebes.common.config.ConfigParameterTypes.SessionLevel;
import edu.berkeley.thebes.common.data.DataItem;
import edu.berkeley.thebes.common.data.Version;
import edu.berkeley.thebes.common.interfaces.IThebesClient;
import edu.berkeley.thebes.common.log4j.Log4JConfig;
import edu.berkeley.thebes.hat.client.clustering.ReplicaRouter;
import edu.berkeley.thebes.hat.common.data.DataDependency;

/**
 * Client for the Thebes HAT (highly-available transactions) store.
 *
 * <p>Buffers writes (and, under REPEATABLE_READ, reads) on the client side between
 * {@link #beginTransaction()} and {@link #commitTransaction()}, and tracks
 * per-key versions in a {@link VersionVector} to enforce the configured
 * atomicity level on reads.
 *
 * <p>NOTE(review): not obviously thread-safe — the buffers and the version vector
 * are plain HashMaps mutated without synchronization; presumably one instance per
 * client thread.  Confirm with callers.
 */
public class ThebesHATClient implements IThebesClient {
    private static Logger logger = LoggerFactory.getLogger(ThebesHATClient.class);

    /**
     * Tracks the highest-known {@link Version} per key.
     * NOTE(review): non-static inner class; holds an (unused) reference to the
     * enclosing client instance.
     */
    private class VersionVector {
        private Map<String, Version> versions = Maps.newHashMap();

        /** Raises each key's entry to {@code newVersion} if it is newer (or absent). */
        public void updateVector(List<String> keys, Version newVersion) {
            for (String key : keys) {
                if (!versions.containsKey(key) || newVersion.compareTo(versions.get(key)) > 0) {
                    versions.put(key, newVersion);
                }
            }
        }

        /** Returns the recorded version for {@code key}, or null if none recorded. */
        public Version getVersion(String key) {
            if(versions.containsKey(key))
                return versions.get(key);
            return null;
        }

        /** Forgets all recorded versions. */
        public void clear() {
            versions.clear();
        }
    }

    // Metrics: request/operation/error rates plus latency timers and payload-size
    // histograms for individual puts and gets.
    private final Meter requestMetric =
            Metrics.newMeter(ThebesHATClient.class, "hat-requests", "requests", TimeUnit.SECONDS);
    private final Meter operationMetric =
            Metrics.newMeter(ThebesHATClient.class, "hat-operations", "operations", TimeUnit.SECONDS);
    private final Meter errorMetric =
            Metrics.newMeter(ThebesHATClient.class, "hat-errors", "errors", TimeUnit.SECONDS);
    private final Timer latencyBufferedXactMetric =
            Metrics.newTimer(ThebesHATClient.class, "hat-latencies-buf-xact", TimeUnit.MILLISECONDS, TimeUnit.SECONDS);
    private final Timer latencyPerPutTimer =
            Metrics.newTimer(ThebesHATClient.class, "hat-latencies-per-put", TimeUnit.MILLISECONDS, TimeUnit.SECONDS);
    private final Histogram putSizeHistogram =
            Metrics.newHistogram(ThebesHATClient.class, "hat-put-size");
    private final Timer latencyPerGetTimer =
            Metrics.newTimer(ThebesHATClient.class, "hat-latencies-per-get", TimeUnit.MILLISECONDS, TimeUnit.SECONDS);
    private final Histogram getSizeHistogram =
            Metrics.newHistogram(ThebesHATClient.class, "hat-get-size");

    // Monotonic per-JVM counter combined with clientID to mint unique Versions.
    private static final AtomicInteger LOGICAL_CLOCK = new AtomicInteger(0);

    private ReplicaRouter router;
    private boolean transactionInProgress = false;
    private final short clientID = Config.getClientID();

    //ANSI client-side data structures
    // NOTE(review): these field initializers run BEFORE the constructor body calls
    // Config.initializeClient() — verify that Config.getClientID()/getThebes*Level()
    // are valid before initialization, or this is an ordering bug.
    private IsolationLevel isolationLevel = Config.getThebesIsolationLevel();
    private Map<String, DataItem> transactionWriteBuffer = Maps.newHashMap();
    private Map<String, DataItem> transactionReadBuffer = Maps.newHashMap();

    //Session guarantee data structures
    // NOTE(review): sessionLevel is read from config but never used in this class.
    private SessionLevel sessionLevel = Config.getThebesSessionLevel();

    //Atomicity data structures
    private AtomicityLevel atomicityLevel = Config.getThebesAtomicityLevel();
    private VersionVector atomicityVersionVector;

    /**
     * Configures logging and client config, then validates that client-side
     * atomicity is only enabled together with READ_COMMITTED or stronger isolation.
     *
     * @throws IllegalStateException if atomicity is enabled below RC isolation
     */
    public ThebesHATClient() throws FileNotFoundException, ConfigurationException {
        Log4JConfig.configureLog4J();
        Config.initializeClient();
        if(atomicityLevel != AtomicityLevel.NO_ATOMICITY && !isolationLevel.atOrHigher(IsolationLevel.READ_COMMITTED)) {
            /* Begs the question: why have TA and RC as separate? Answer is that this may change
               if we go with the more permissive "broad interpretation" of RC:
               c.f., P1 vs. A1 in Berensen et al., SIGMOD '95 */
            throw new IllegalStateException("Transactional atomicity guarantees must run at isolation of RC or higher");
        }
    }

    /** Connects the replica router and resets the atomicity version vector. */
    @Override
    public void open() throws TTransportException, ConfigurationException, IOException {
        router = ReplicaRouter.newInstance(Config.getRoutingMode());
        atomicityVersionVector = new VersionVector();
    }

    /** Starts a new transaction: fresh read/write buffers, cleared version vector. */
    @Override
    public void beginTransaction() throws TException {
        transactionWriteBuffer = Maps.newHashMap();
        transactionReadBuffer = Maps.newHashMap();
        atomicityVersionVector.clear();
        transactionInProgress = true;
    }

    /**
     * Flushes the write buffer to the replicas, stamping every buffered write with a
     * single freshly-minted transaction version (under RC+) and, under CLIENT
     * atomicity, attaching the full key set of the transaction to each write.
     */
    private void applyWritesInBuffer() throws TException {
        Version transactionVersion = new Version(clientID, LOGICAL_CLOCK.incrementAndGet(), System.currentTimeMillis());
        List<String> transactionKeys = new ArrayList<String>(transactionWriteBuffer.keySet());
        logger.trace("Batch put of "+transactionKeys.size()+" keys.");
        for(String key : transactionKeys) {
            DataItem queuedWrite = transactionWriteBuffer.get(key);
            if(isolationLevel.atOrHigher(IsolationLevel.READ_COMMITTED)) {
                queuedWrite.setVersion(transactionVersion);
            }
            if(atomicityLevel == AtomicityLevel.CLIENT) {
                queuedWrite.setTransactionKeys(transactionKeys);
            }
            doPutSync(key, queuedWrite);
        }
        // Record our own writes so subsequent reads observe at least this version.
        if(atomicityLevel == AtomicityLevel.CLIENT)
            atomicityVersionVector.updateVector(new ArrayList<String>(transactionWriteBuffer.keySet()), transactionVersion);
    }

    /**
     * Ends the transaction, flushing buffered writes when isolation is enabled.
     * Always returns true; failures surface as thrown TExceptions from the flush.
     */
    @Override
    public boolean commitTransaction() throws TException {
        transactionInProgress = false;
        requestMetric.mark();

        if(isolationLevel.higherThan(IsolationLevel.NO_ISOLATION)) {
            TimerContext timer = latencyBufferedXactMetric.time();
            applyWritesInBuffer();
            timer.stop();
        }
        transactionWriteBuffer.clear();
        transactionReadBuffer.clear();
        return true;
    }

    /** Discards all buffered reads/writes without contacting the replicas. */
    @Override
    public void abortTransaction() throws TException {
        transactionInProgress = false;
        requestMetric.mark();
        transactionWriteBuffer.clear();
        transactionReadBuffer.clear();
    }

    /**
     * Writes a key.  Under isolation or atomicity the write is buffered until commit
     * (and, under REPEATABLE_READ, also made visible to our own reads); otherwise it
     * is sent to the replica synchronously.
     *
     * @throws TException if no transaction is in progress
     */
    @Override
    public boolean put(String key, ByteBuffer value) throws TException {
        if(!transactionInProgress)
            throw new TException("transaction is not in progress");
        operationMetric.mark();
        long timestamp = System.currentTimeMillis();
        DataItem dataItem = new DataItem(value, new Version(clientID, LOGICAL_CLOCK.incrementAndGet(), timestamp));

        if(isolationLevel.higherThan(IsolationLevel.NO_ISOLATION) || atomicityLevel != AtomicityLevel.NO_ATOMICITY) {
            if(isolationLevel == IsolationLevel.REPEATABLE_READ) {
                // Read-your-writes within the transaction.
                transactionReadBuffer.put(key, dataItem);
            }
            transactionWriteBuffer.put(key, dataItem);
            return true;
        }
        else {
            return doPutSync(key, dataItem);
        }
    }

    /**
     * Reads a key, honoring the configured isolation/atomicity levels: serves
     * repeated reads from the read buffer under REPEATABLE_READ, suppresses values
     * from transactions newer than something we already read, and prefers our own
     * buffered (newer) writes over the replica's value.
     *
     * @return the value bytes, or null if absent or suppressed for consistency
     * @throws TException if no transaction is in progress
     */
    @Override
    public ByteBuffer get(String key) throws TException {
        if(!transactionInProgress)
            throw new TException("transaction is not in progress");
        operationMetric.mark();

        // Under RR, a previously-read (or previously-written) value is authoritative.
        if(isolationLevel == IsolationLevel.REPEATABLE_READ && transactionReadBuffer.containsKey(key)) {
            return transactionReadBuffer.get(key).getData();
        }

        DataItem ret = doGet(key);

        if(isolationLevel == IsolationLevel.REPEATABLE_READ) {
            if(ret != null && ret.getTransactionKeys() != null) {
                /* If the value we just read was part of a transaction that i.) wrote to a
                   key we've already read and ii.) is ordered after the transaction that
                   wrote to that key, then we can't show it. For now, return null. */
                for(String atomicKey : ret.getTransactionKeys()) {
                    if(atomicityVersionVector.getVersion(atomicKey) != null &&
                       atomicityVersionVector.getVersion(atomicKey).compareTo(ret.getVersion()) < 0) {
                        transactionReadBuffer.put(atomicKey, new DataItem(null, Version.NULL_VERSION));
                        return null;
                    }
                }
                atomicityVersionVector.updateVector(ret.getTransactionKeys(), ret.getVersion());
                transactionReadBuffer.put(key, ret);
            }
            else {
                /* If we read a null, we should read null for future reads too! */
                transactionReadBuffer.put(key, new DataItem(null, Version.NULL_VERSION));
                return null;
            }
        }

        // if this branch evaluates to true, then we're using Transactional Atomicity or RC or greater
        if(transactionWriteBuffer.containsKey(key)) {
            if (ret == null || transactionWriteBuffer.get(key).getVersion()
                                                     .compareTo(ret.getVersion()) > 0) {
                return transactionWriteBuffer.get(key).getData();
            }
        }

        if(atomicityLevel != AtomicityLevel.NO_ATOMICITY && ret != null && ret.getTransactionKeys() != null) {
            atomicityVersionVector.updateVector(ret.getTransactionKeys(), ret.getVersion());
        }

        return ret == null ? null : ret.getData();
    }

    // NOTE(review): TSerializer instances are not documented as thread-safe;
    // fine if this client is single-threaded — confirm usage.
    TSerializer serializer = new TSerializer();

    /**
     * Sends one write to the replica router synchronously, recording payload size,
     * latency, and errors.  Rethrows any failure after marking the error metric.
     */
    private boolean doPutSync(String key, DataItem value) throws TException {
        TimerContext timer = latencyPerPutTimer.time();
        boolean ret;
        byte[] putBytes = serializer.serialize(value.toThrift());
        putSizeHistogram.update(putBytes.length);
        try {
            ret = router.put(key, value);
        } catch (RuntimeException e) {
            errorMetric.mark();
            throw e;
        } catch (TException e) {
            errorMetric.mark();
            throw e;
        } finally {
            timer.stop();
        }
        return ret;
    }

    /**
     * Fetches one key from the replica router (passing the minimum acceptable
     * version for atomicity), recording payload size, latency, and errors.
     *
     * @return the data item, or null if the replica returned no data
     */
    private DataItem doGet(String key) throws TException {
        TimerContext timer = latencyPerGetTimer.time();
        DataItem ret;
        try {
            ThriftDataItem tdrRet = router.get(key, atomicityVersionVector.getVersion(key));
            if(tdrRet.getData() == null)
                return null;
            ret = new DataItem(tdrRet);
            // Serialized only to measure the response size.
            byte[] getBytes = serializer.serialize(ret.toThrift());
            getSizeHistogram.update(getBytes.length);
        } catch (RuntimeException e) {
            errorMetric.mark();
            throw e;
        } catch (TException e) {
            errorMetric.mark();
            throw e;
        } finally {
            timer.stop();
        }
        return ret;
    }

    /** Arbitrary commands are not supported by the HAT client. */
    @Override
    public void sendCommand(String cmd) throws TException {
        throw new UnsupportedOperationException();
    }

    /** No-op: this client holds no resources requiring explicit shutdown here. */
    public void close() {
        return;
    }
}
/** */ package bpsim.impl; import bpsim.BpsimPackage; import bpsim.Calendar; import bpsim.ElementParameters; import bpsim.ElementParametersType; import bpsim.Scenario; import bpsim.ScenarioParameters; import bpsim.ScenarioParametersType; import bpsim.VendorExtension; import java.util.Collection; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.impl.EObjectImpl; import org.eclipse.emf.ecore.util.EObjectContainmentEList; import org.eclipse.emf.ecore.util.InternalEList; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Scenario</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * <ul> * <li>{@link bpsim.impl.ScenarioImpl#getScenarioParameters <em>Scenario Parameters</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getElementParameters <em>Element Parameters</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getCalendar <em>Calendar</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getVendorExtension <em>Vendor Extension</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getAuthor <em>Author</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getCreated <em>Created</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getDescription <em>Description</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getId <em>Id</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getInherits <em>Inherits</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getModified <em>Modified</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getName <em>Name</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getResult <em>Result</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getVendor <em>Vendor</em>}</li> * <li>{@link bpsim.impl.ScenarioImpl#getVersion <em>Version</em>}</li> * </ul> * </p> * * @generated */ 
public class ScenarioImpl extends EObjectImpl implements Scenario { /** * The cached value of the '{@link #getScenarioParameters() <em>Scenario Parameters</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getScenarioParameters() * @generated * @ordered */ protected ScenarioParameters scenarioParameters; /** * The cached value of the '{@link #getElementParameters() <em>Element Parameters</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getElementParameters() * @generated * @ordered */ protected EList<ElementParameters> elementParameters; /** * The cached value of the '{@link #getCalendar() <em>Calendar</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getCalendar() * @generated * @ordered */ protected EList<Calendar> calendar; /** * The cached value of the '{@link #getVendorExtension() <em>Vendor Extension</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getVendorExtension() * @generated * @ordered */ protected EList<VendorExtension> vendorExtension; /** * The default value of the '{@link #getAuthor() <em>Author</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getAuthor() * @generated * @ordered */ protected static final String AUTHOR_EDEFAULT = null; /** * The cached value of the '{@link #getAuthor() <em>Author</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getAuthor() * @generated * @ordered */ protected String author = AUTHOR_EDEFAULT; /** * The default value of the '{@link #getCreated() <em>Created</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getCreated() * @generated * @ordered */ protected static final Object CREATED_EDEFAULT = null; /** * The cached value of the '{@link #getCreated() <em>Created</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getCreated() * @generated * @ordered */ protected Object created = CREATED_EDEFAULT; /** * The default value of the '{@link #getDescription() <em>Description</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getDescription() * @generated * @ordered */ protected static final String DESCRIPTION_EDEFAULT = null; /** * The cached value of the '{@link #getDescription() <em>Description</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getDescription() * @generated * @ordered */ protected String description = DESCRIPTION_EDEFAULT; /** * The default value of the '{@link #getId() <em>Id</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getId() * @generated * @ordered */ protected static final String ID_EDEFAULT = null; /** * The cached value of the '{@link #getId() <em>Id</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getId() * @generated * @ordered */ protected String id = ID_EDEFAULT; /** * The default value of the '{@link #getInherits() <em>Inherits</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getInherits() * @generated * @ordered */ protected static final String INHERITS_EDEFAULT = null; /** * The cached value of the '{@link #getInherits() <em>Inherits</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getInherits() * @generated * @ordered */ protected String inherits = INHERITS_EDEFAULT; /** * The default value of the '{@link #getModified() <em>Modified</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getModified() * @generated * @ordered */ protected static final Object MODIFIED_EDEFAULT = null; /** * The cached value of the '{@link #getModified() <em>Modified</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getModified() * @generated * @ordered */ protected Object modified = MODIFIED_EDEFAULT; /** * The default value of the '{@link #getName() <em>Name</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getName() * @generated * @ordered */ protected static final String NAME_EDEFAULT = null; /** * The cached value of the '{@link #getName() <em>Name</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getName() * @generated * @ordered */ protected String name = NAME_EDEFAULT; /** * The default value of the '{@link #getResult() <em>Result</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getResult() * @generated * @ordered */ protected static final String RESULT_EDEFAULT = null; /** * The cached value of the '{@link #getResult() <em>Result</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getResult() * @generated * @ordered */ protected String result = RESULT_EDEFAULT; /** * The default value of the '{@link #getVendor() <em>Vendor</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getVendor() * @generated * @ordered */ protected static final String VENDOR_EDEFAULT = null; /** * The cached value of the '{@link #getVendor() <em>Vendor</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getVendor() * @generated * @ordered */ protected String vendor = VENDOR_EDEFAULT; /** * The default value of the '{@link #getVersion() <em>Version</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getVersion() * @generated * @ordered */ protected static final String VERSION_EDEFAULT = null; /** * The cached value of the '{@link #getVersion() <em>Version</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getVersion() * @generated * @ordered */ protected String version = VERSION_EDEFAULT; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ScenarioImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return BpsimPackage.Literals.SCENARIO; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public ScenarioParameters getScenarioParameters() { return scenarioParameters; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetScenarioParameters(ScenarioParameters newScenarioParameters, NotificationChain msgs) { ScenarioParameters oldScenarioParameters = scenarioParameters; scenarioParameters = newScenarioParameters; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__SCENARIO_PARAMETERS, oldScenarioParameters, newScenarioParameters); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setScenarioParameters(ScenarioParameters newScenarioParameters) { if (newScenarioParameters != scenarioParameters) { NotificationChain msgs = null; if (scenarioParameters != null) msgs = ((InternalEObject)scenarioParameters).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - BpsimPackage.SCENARIO__SCENARIO_PARAMETERS, null, msgs); if (newScenarioParameters != null) msgs = ((InternalEObject)newScenarioParameters).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - BpsimPackage.SCENARIO__SCENARIO_PARAMETERS, null, msgs); msgs = basicSetScenarioParameters(newScenarioParameters, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__SCENARIO_PARAMETERS, newScenarioParameters, 
newScenarioParameters)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<ElementParameters> getElementParameters() { if (elementParameters == null) { elementParameters = new EObjectContainmentEList<ElementParameters>(ElementParameters.class, this, BpsimPackage.SCENARIO__ELEMENT_PARAMETERS); } return elementParameters; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<Calendar> getCalendar() { if (calendar == null) { calendar = new EObjectContainmentEList<Calendar>(Calendar.class, this, BpsimPackage.SCENARIO__CALENDAR); } return calendar; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<VendorExtension> getVendorExtension() { if (vendorExtension == null) { vendorExtension = new EObjectContainmentEList<VendorExtension>(VendorExtension.class, this, BpsimPackage.SCENARIO__VENDOR_EXTENSION); } return vendorExtension; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getAuthor() { return author; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setAuthor(String newAuthor) { String oldAuthor = author; author = newAuthor; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__AUTHOR, oldAuthor, author)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Object getCreated() { return created; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setCreated(Object newCreated) { Object oldCreated = created; created = newCreated; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__CREATED, oldCreated, created)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getDescription() { return description; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setDescription(String 
newDescription) {
        String oldDescription = description;
        description = newDescription;
        // Fire an EMF SET notification only when adapters are attached.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__DESCRIPTION, oldDescription, description));
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public String getId() {
        return id;
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public void setId(String newId) {
        String oldId = id;
        id = newId;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__ID, oldId, id));
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public String getInherits() {
        return inherits;
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public void setInherits(String newInherits) {
        String oldInherits = inherits;
        inherits = newInherits;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__INHERITS, oldInherits, inherits));
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public Object getModified() {
        return modified;
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public void setModified(Object newModified) {
        Object oldModified = modified;
        modified = newModified;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__MODIFIED, oldModified, modified));
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public String getName() {
        return name;
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public void setName(String newName) {
        String oldName = name;
        name = newName;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__NAME, oldName, name));
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public String getResult() {
        return result;
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public void setResult(String newResult) {
        String oldResult = result;
        result = newResult;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__RESULT, oldResult, result));
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public String getVendor() {
        return vendor;
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public void setVendor(String newVendor) {
        String oldVendor = vendor;
        vendor = newVendor;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__VENDOR, oldVendor, vendor));
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public String getVersion() {
        return version;
    }

    /** <!-- begin-user-doc --> <!-- end-user-doc --> @generated */
    public void setVersion(String newVersion) {
        String oldVersion = version;
        version = newVersion;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, BpsimPackage.SCENARIO__VERSION, oldVersion, version));
    }

    /**
     * Detaches an inverse reference: clears the single containment for scenario
     * parameters and removes entries from the multi-valued containment lists.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case BpsimPackage.SCENARIO__SCENARIO_PARAMETERS:
                return basicSetScenarioParameters(null, msgs);
            case BpsimPackage.SCENARIO__ELEMENT_PARAMETERS:
                return ((InternalEList<?>)getElementParameters()).basicRemove(otherEnd, msgs);
            case BpsimPackage.SCENARIO__CALENDAR:
                return ((InternalEList<?>)getCalendar()).basicRemove(otherEnd, msgs);
            case BpsimPackage.SCENARIO__VENDOR_EXTENSION:
                return ((InternalEList<?>)getVendorExtension()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * Reflective getter used by the EMF framework: maps a feature ID to the
     * corresponding typed accessor.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case BpsimPackage.SCENARIO__SCENARIO_PARAMETERS:
                return getScenarioParameters();
            case BpsimPackage.SCENARIO__ELEMENT_PARAMETERS:
                return getElementParameters();
            case BpsimPackage.SCENARIO__CALENDAR:
                return getCalendar();
            case BpsimPackage.SCENARIO__VENDOR_EXTENSION:
                return getVendorExtension();
            case BpsimPackage.SCENARIO__AUTHOR:
                return getAuthor();
            case BpsimPackage.SCENARIO__CREATED:
                return getCreated();
            case BpsimPackage.SCENARIO__DESCRIPTION:
                return getDescription();
            case BpsimPackage.SCENARIO__ID:
                return getId();
            case BpsimPackage.SCENARIO__INHERITS:
                return getInherits();
            case BpsimPackage.SCENARIO__MODIFIED:
                return getModified();
            case BpsimPackage.SCENARIO__NAME:
                return getName();
            case BpsimPackage.SCENARIO__RESULT:
                return getResult();
            case BpsimPackage.SCENARIO__VENDOR:
                return getVendor();
            case BpsimPackage.SCENARIO__VERSION:
                return getVersion();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective setter used by the EMF framework: list-valued features are
     * cleared then repopulated; single-valued features delegate to the setter.
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case BpsimPackage.SCENARIO__SCENARIO_PARAMETERS:
                setScenarioParameters((ScenarioParameters)newValue);
                return;
            case BpsimPackage.SCENARIO__ELEMENT_PARAMETERS:
                getElementParameters().clear();
                getElementParameters().addAll((Collection<? extends ElementParameters>)newValue);
                return;
            case BpsimPackage.SCENARIO__CALENDAR:
                getCalendar().clear();
                getCalendar().addAll((Collection<? extends Calendar>)newValue);
                return;
            case BpsimPackage.SCENARIO__VENDOR_EXTENSION:
                getVendorExtension().clear();
                getVendorExtension().addAll((Collection<? extends VendorExtension>)newValue);
                return;
            case BpsimPackage.SCENARIO__AUTHOR:
                setAuthor((String)newValue);
                return;
            case BpsimPackage.SCENARIO__CREATED:
                setCreated(newValue);
                return;
            case BpsimPackage.SCENARIO__DESCRIPTION:
                setDescription((String)newValue);
                return;
            case BpsimPackage.SCENARIO__ID:
                setId((String)newValue);
                return;
            case BpsimPackage.SCENARIO__INHERITS:
                setInherits((String)newValue);
                return;
            case BpsimPackage.SCENARIO__MODIFIED:
                setModified(newValue);
                return;
            case BpsimPackage.SCENARIO__NAME:
                setName((String)newValue);
                return;
            case BpsimPackage.SCENARIO__RESULT:
                setResult((String)newValue);
                return;
            case BpsimPackage.SCENARIO__VENDOR:
                setVendor((String)newValue);
                return;
            case BpsimPackage.SCENARIO__VERSION:
                setVersion((String)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * Reflective unset: restores each feature to its generated default
     * (*_EDEFAULT constants, empty lists, null containment).
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case BpsimPackage.SCENARIO__SCENARIO_PARAMETERS:
                setScenarioParameters((ScenarioParameters)null);
                return;
            case BpsimPackage.SCENARIO__ELEMENT_PARAMETERS:
                getElementParameters().clear();
                return;
            case BpsimPackage.SCENARIO__CALENDAR:
                getCalendar().clear();
                return;
            case BpsimPackage.SCENARIO__VENDOR_EXTENSION:
                getVendorExtension().clear();
                return;
            case BpsimPackage.SCENARIO__AUTHOR:
                setAuthor(AUTHOR_EDEFAULT);
                return;
            case BpsimPackage.SCENARIO__CREATED:
                setCreated(CREATED_EDEFAULT);
                return;
            case BpsimPackage.SCENARIO__DESCRIPTION:
                setDescription(DESCRIPTION_EDEFAULT);
                return;
            case BpsimPackage.SCENARIO__ID:
                setId(ID_EDEFAULT);
                return;
            case BpsimPackage.SCENARIO__INHERITS:
                setInherits(INHERITS_EDEFAULT);
                return;
            case BpsimPackage.SCENARIO__MODIFIED:
                setModified(MODIFIED_EDEFAULT);
                return;
            case BpsimPackage.SCENARIO__NAME:
                setName(NAME_EDEFAULT);
                return;
            case BpsimPackage.SCENARIO__RESULT:
                setResult(RESULT_EDEFAULT);
                return;
            case BpsimPackage.SCENARIO__VENDOR:
                setVendor(VENDOR_EDEFAULT);
                return;
            case BpsimPackage.SCENARIO__VERSION:
                setVersion(VERSION_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * Reflective "is set" check: a feature is set when it differs from its
     * generated default (non-null containment, non-empty list, or value not
     * equal to the *_EDEFAULT constant).
     * <!-- begin-user-doc --> <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case BpsimPackage.SCENARIO__SCENARIO_PARAMETERS:
                return scenarioParameters != null;
            case BpsimPackage.SCENARIO__ELEMENT_PARAMETERS:
                return elementParameters != null && !elementParameters.isEmpty();
            case BpsimPackage.SCENARIO__CALENDAR:
                return calendar != null && !calendar.isEmpty();
            case BpsimPackage.SCENARIO__VENDOR_EXTENSION:
                return vendorExtension != null && !vendorExtension.isEmpty();
            case BpsimPackage.SCENARIO__AUTHOR:
                return AUTHOR_EDEFAULT == null ? author != null : !AUTHOR_EDEFAULT.equals(author);
            case BpsimPackage.SCENARIO__CREATED:
                return CREATED_EDEFAULT == null ? created != null : !CREATED_EDEFAULT.equals(created);
            case BpsimPackage.SCENARIO__DESCRIPTION:
                return DESCRIPTION_EDEFAULT == null ? description != null : !DESCRIPTION_EDEFAULT.equals(description);
            case BpsimPackage.SCENARIO__ID:
                return ID_EDEFAULT == null ? id != null : !ID_EDEFAULT.equals(id);
            case BpsimPackage.SCENARIO__INHERITS:
                return INHERITS_EDEFAULT == null ? inherits != null : !INHERITS_EDEFAULT.equals(inherits);
            case BpsimPackage.SCENARIO__MODIFIED:
                return MODIFIED_EDEFAULT == null ? modified != null : !MODIFIED_EDEFAULT.equals(modified);
            case BpsimPackage.SCENARIO__NAME:
                return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
            case BpsimPackage.SCENARIO__RESULT:
                return RESULT_EDEFAULT == null ? result != null : !RESULT_EDEFAULT.equals(result);
            case BpsimPackage.SCENARIO__VENDOR:
                return VENDOR_EDEFAULT == null ? vendor != null : !VENDOR_EDEFAULT.equals(vendor);
            case BpsimPackage.SCENARIO__VERSION:
                return VERSION_EDEFAULT == null ?
version != null : !VERSION_EDEFAULT.equals(version); } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (author: "); result.append(author); result.append(", created: "); result.append(created); result.append(", description: "); result.append(description); result.append(", id: "); result.append(id); result.append(", inherits: "); result.append(inherits); result.append(", modified: "); result.append(modified); result.append(", name: "); result.append(name); result.append(", result: "); result.append(result); result.append(", vendor: "); result.append(vendor); result.append(", version: "); result.append(version); result.append(')'); return result.toString(); } } //ScenarioImpl
package com.atlassian.maven.plugins.jgitflow.mojo; import java.io.File; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; import java.util.*; import com.atlassian.maven.jgitflow.api.MavenJGitFlowExtension; import com.atlassian.maven.plugins.jgitflow.FlowInitContext; import com.atlassian.maven.plugins.jgitflow.provider.ContextProvider; import com.atlassian.maven.plugins.jgitflow.provider.MavenSessionProvider; import com.atlassian.maven.plugins.jgitflow.provider.ReactorProjectsProvider; import com.google.common.base.Strings; import org.apache.maven.artifact.DependencyResolutionRequiredException; import org.apache.maven.execution.MavenSession; import org.apache.maven.plugin.AbstractMojo; import org.apache.maven.plugin.MojoExecutionException; import org.apache.maven.plugins.annotations.Component; import org.apache.maven.plugins.annotations.Parameter; import org.apache.maven.project.MavenProject; import org.apache.maven.settings.Settings; /** * @since version */ public abstract class AbstractJGitFlowMojo extends AbstractMojo { @Component protected MavenProject project; @Component protected MavenSession session; @Component private Settings settings; @Parameter(defaultValue = "${basedir}", readonly = true, required = true) private File basedir; @Parameter(defaultValue = "${reactorProjects}", readonly = true, required = true) private List<MavenProject> reactorProjects; @Parameter(defaultValue = "${flowInitContext}") private FlowInitContext flowInitContext; @Parameter(defaultValue = "false", property = "enableSshAgent") protected boolean enableSshAgent = false; /** * Whether to allow SNAPSHOT dependencies. Default is to fail when finding any SNAPSHOT. 
* */ @Parameter( defaultValue = "false", property = "allowSnapshots" ) protected boolean allowSnapshots = false; @Parameter(defaultValue = "false", property = "allowUntracked") protected boolean allowUntracked = false; @Parameter(property = "offline", defaultValue = "${settings.offline}") protected boolean offline; @Parameter(property = "localOnly", defaultValue = "false") protected boolean localOnly = false; @Parameter(property = "defaultOriginUrl", defaultValue = "") protected String defaultOriginUrl = ""; @Parameter(property = "scmCommentPrefix", defaultValue = "") protected String scmCommentPrefix = ""; @Parameter(property = "scmCommentSuffix", defaultValue = "") protected String scmCommentSuffix = ""; @Parameter(property = "username", defaultValue = "") protected String username = ""; @Parameter(property = "password", defaultValue = "") protected String password = ""; @Parameter(defaultValue = "true", property = "alwaysUpdateOrigin") protected boolean alwaysUpdateOrigin = true; @Parameter( defaultValue = "false", property = "pullMaster" ) protected boolean pullMaster = false; @Parameter( defaultValue = "false", property = "pullDevelop" ) protected boolean pullDevelop = false; Settings getSettings() { return settings; } protected final File getBasedir() { return basedir; } /** * Sets the base directory of the build. * * @param basedir The build's base directory, must not be <code>null</code>. */ public void setBasedir(File basedir) { this.basedir = basedir; } /** * Gets the list of projects in the build reactor. * * @return The list of reactor project, never <code>null</code>. 
*/ public List<MavenProject> getReactorProjects() { return reactorProjects; } public FlowInitContext getFlowInitContext() { return flowInitContext; } public void setFlowInitContext(FlowInitContext flowInitContext) { this.flowInitContext = flowInitContext; } public boolean isRemoteAllowed() { return (!offline && !localOnly); } public MavenJGitFlowExtension getExtensionInstance(String classname) throws MojoExecutionException { if(Strings.isNullOrEmpty(classname)) { return null; } try { Class<?> providerClass = Thread.currentThread().getContextClassLoader().loadClass(classname); Constructor ctr = providerClass.getConstructor(); return (MavenJGitFlowExtension) ctr.newInstance(); } catch (Exception e) { throw new MojoExecutionException("Unable to load maven jgitflow extension class '" + classname + "'",e); } } public ClassLoader getClassloader(String classpath) { List<String> pathList = Arrays.asList(classpath.split(File.pathSeparator)); List<URL> urls = new ArrayList<URL>( pathList.size() ); for ( String filename : pathList ) { try { urls.add( new File( filename ).toURL() ); } catch ( MalformedURLException e ) { //ignore } } return new URLClassLoader((URL[]) urls.toArray(new URL[urls.size()]), Thread.currentThread().getContextClassLoader()); } protected String getClasspath() throws MojoExecutionException { Set<String> allPaths = new HashSet<String>(); StringBuffer finalPath = new StringBuffer(File.pathSeparator + project.getBuild().getOutputDirectory()); try { allPaths.addAll(project.getCompileClasspathElements()); allPaths.addAll(project.getRuntimeClasspathElements()); allPaths.addAll(project.getSystemClasspathElements()); URL[] pluginUrls = ((URLClassLoader)Thread.currentThread().getContextClassLoader()).getURLs(); for(URL pluginUrl : pluginUrls) { allPaths.add(new File(pluginUrl.getFile()).getPath()); } for(String path : allPaths) { finalPath.append(File.pathSeparator); finalPath.append(path); } return finalPath.toString(); } catch 
(DependencyResolutionRequiredException e) { throw new MojoExecutionException("Dependencies must be resolved", e); } } }
// Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.base.process_launcher; import android.content.ComponentName; import android.content.Context; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.os.UserManager; import androidx.annotation.VisibleForTesting; import androidx.collection.ArraySet; import org.chromium.base.BuildInfo; import org.chromium.base.ContextUtils; import org.chromium.base.Log; import org.chromium.base.SysUtils; import org.chromium.base.compat.ApiHelperForM; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Queue; /** * This class is responsible for allocating and managing connections to child * process services. These connections are in a pool (the services are defined * in the AndroidManifest.xml). */ public abstract class ChildConnectionAllocator { private static final String TAG = "ChildConnAllocator"; private static final String ZYGOTE_SUFFIX = "0"; private static final String NON_ZYGOTE_SUFFIX = "1"; /** Factory interface. Used by tests to specialize created connections. */ @VisibleForTesting public interface ConnectionFactory { ChildProcessConnection createConnection(Context context, ComponentName serviceName, boolean bindToCaller, boolean bindAsExternalService, Bundle serviceBundle, String instanceName); } /** Default implementation of the ConnectionFactory that creates actual connections. 
*/ private static class ConnectionFactoryImpl implements ConnectionFactory { @Override public ChildProcessConnection createConnection(Context context, ComponentName serviceName, boolean bindToCaller, boolean bindAsExternalService, Bundle serviceBundle, String instanceName) { return new ChildProcessConnection(context, serviceName, bindToCaller, bindAsExternalService, serviceBundle, instanceName); } } // Delay between the call to freeConnection and the connection actually beeing freed. private static final long FREE_CONNECTION_DELAY_MILLIS = 1; // Max number of connections allocated for variable allocator. private static final int MAX_VARIABLE_ALLOCATED = 100; // Runnable which will be called when allocator wants to allocate a new connection, but does // not have any more free slots. May be null. private final Runnable mFreeSlotCallback; private final Queue<Runnable> mPendingAllocations = new ArrayDeque<>(); // The handler of the thread on which all interations should happen. private final Handler mLauncherHandler; /* package */ final String mPackageName; /* package */ final String mServiceClassName; /* package */ final boolean mBindToCaller; /* package */ final boolean mBindAsExternalService; /* package */ final boolean mUseStrongBinding; /* package */ ConnectionFactory mConnectionFactory = new ConnectionFactoryImpl(); private static void checkServiceExists( Context context, String packageName, String serviceClassName) { PackageManager packageManager = context.getPackageManager(); // Check that the service exists. try { // PackageManager#getServiceInfo() throws an exception if the service does not exist. packageManager.getServiceInfo( new ComponentName(packageName, serviceClassName + "0"), 0); } catch (PackageManager.NameNotFoundException e) { throw new RuntimeException("Illegal meta data value: the child service doesn't exist"); } } /** * Factory method that retrieves the service name and number of service from the * AndroidManifest.xml. 
*/ public static ChildConnectionAllocator create(Context context, Handler launcherHandler, Runnable freeSlotCallback, String packageName, String serviceClassName, String numChildServicesManifestKey, boolean bindToCaller, boolean bindAsExternalService, boolean useStrongBinding) { int numServices = -1; PackageManager packageManager = context.getPackageManager(); try { ApplicationInfo appInfo = packageManager.getApplicationInfo(packageName, PackageManager.GET_META_DATA); if (appInfo.metaData != null) { numServices = appInfo.metaData.getInt(numChildServicesManifestKey, -1); } } catch (PackageManager.NameNotFoundException e) { throw new RuntimeException("Could not get application info."); } if (numServices < 0) { throw new RuntimeException("Illegal meta data value for number of child services"); } checkServiceExists(context, packageName, serviceClassName); return new FixedSizeAllocatorImpl(launcherHandler, freeSlotCallback, packageName, serviceClassName, bindToCaller, bindAsExternalService, useStrongBinding, numServices); } public static ChildConnectionAllocator createVariableSize(Context context, Handler launcherHandler, Runnable freeSlotCallback, String packageName, String serviceClassName, boolean bindToCaller, boolean bindAsExternalService, boolean useStrongBinding) { checkServiceExists(context, packageName, serviceClassName); // OnePlus devices are having trouble with app zygote in combination with dynamic // feature modules. See crbug.com/1064314 for details. 
BuildInfo buildInfo = BuildInfo.getInstance(); boolean disableZygote = Build.VERSION.SDK_INT == 29 && buildInfo.androidBuildFingerprint.startsWith("OnePlus/"); if (Build.VERSION.SDK_INT == 29 && !disableZygote) { UserManager userManager = (UserManager) ContextUtils.getApplicationContext().getSystemService( Context.USER_SERVICE); if (!ApiHelperForM.isSystemUser(userManager)) { return new Android10WorkaroundAllocatorImpl(launcherHandler, freeSlotCallback, packageName, serviceClassName, bindToCaller, bindAsExternalService, useStrongBinding, MAX_VARIABLE_ALLOCATED); } } // On low end devices, we do not expect to have many renderers. As a consequence, the fixed // costs of the app zygote are not recovered. See https://crbug.com/1044579 for context and // experimental results. String suffix = (SysUtils.isLowEndDevice() || disableZygote) ? NON_ZYGOTE_SUFFIX : ZYGOTE_SUFFIX; return new VariableSizeAllocatorImpl(launcherHandler, freeSlotCallback, packageName, serviceClassName + suffix, bindToCaller, bindAsExternalService, useStrongBinding, MAX_VARIABLE_ALLOCATED); } /** * Factory method used with some tests to create an allocator with values passed in directly * instead of being retrieved from the AndroidManifest.xml. 
*/ @VisibleForTesting public static FixedSizeAllocatorImpl createFixedForTesting(Runnable freeSlotCallback, String packageName, String serviceClassName, int serviceCount, boolean bindToCaller, boolean bindAsExternalService, boolean useStrongBinding) { return new FixedSizeAllocatorImpl(new Handler(), freeSlotCallback, packageName, serviceClassName, bindToCaller, bindAsExternalService, useStrongBinding, serviceCount); } @VisibleForTesting public static VariableSizeAllocatorImpl createVariableSizeForTesting(Handler launcherHandler, String packageName, Runnable freeSlotCallback, String serviceClassName, boolean bindToCaller, boolean bindAsExternalService, boolean useStrongBinding, int maxAllocated) { return new VariableSizeAllocatorImpl(launcherHandler, freeSlotCallback, packageName, serviceClassName + ZYGOTE_SUFFIX, bindToCaller, bindAsExternalService, useStrongBinding, maxAllocated); } @VisibleForTesting public static Android10WorkaroundAllocatorImpl createWorkaroundForTesting( Handler launcherHandler, String packageName, Runnable freeSlotCallback, String serviceClassName, boolean bindToCaller, boolean bindAsExternalService, boolean useStrongBinding, int maxAllocated) { return new Android10WorkaroundAllocatorImpl(launcherHandler, freeSlotCallback, packageName, serviceClassName, bindToCaller, bindAsExternalService, useStrongBinding, maxAllocated); } private ChildConnectionAllocator(Handler launcherHandler, Runnable freeSlotCallback, String packageName, String serviceClassName, boolean bindToCaller, boolean bindAsExternalService, boolean useStrongBinding) { mLauncherHandler = launcherHandler; assert isRunningOnLauncherThread(); mFreeSlotCallback = freeSlotCallback; mPackageName = packageName; mServiceClassName = serviceClassName; mBindToCaller = bindToCaller; mBindAsExternalService = bindAsExternalService; mUseStrongBinding = useStrongBinding; } /** @return a bound connection, or null if there are no free slots. 
 */
    public ChildProcessConnection allocate(Context context, Bundle serviceBundle,
            final ChildProcessConnection.ServiceCallback serviceCallback) {
        assert isRunningOnLauncherThread();
        // Wrap the service callbacks so that:
        // - we can intercept onChildProcessDied and clean-up connections
        // - the callbacks are actually posted so that this method will return before the callbacks
        // are called (so that the caller may set any reference to the returned connection before
        // any callback logic potentially tries to access that connection).
        ChildProcessConnection.ServiceCallback serviceCallbackWrapper =
                new ChildProcessConnection.ServiceCallback() {
                    @Override
                    public void onChildStarted() {
                        assert isRunningOnLauncherThread();
                        if (serviceCallback != null) {
                            mLauncherHandler.post(new Runnable() {
                                @Override
                                public void run() {
                                    serviceCallback.onChildStarted();
                                }
                            });
                        }
                    }

                    @Override
                    public void onChildStartFailed(final ChildProcessConnection connection) {
                        assert isRunningOnLauncherThread();
                        if (serviceCallback != null) {
                            mLauncherHandler.post(new Runnable() {
                                @Override
                                public void run() {
                                    serviceCallback.onChildStartFailed(connection);
                                }
                            });
                        }
                        // A failed start still occupies a slot; release it (delayed).
                        freeConnectionWithDelay(connection);
                    }

                    @Override
                    public void onChildProcessDied(final ChildProcessConnection connection) {
                        assert isRunningOnLauncherThread();
                        if (serviceCallback != null) {
                            mLauncherHandler.post(new Runnable() {
                                @Override
                                public void run() {
                                    serviceCallback.onChildProcessDied(connection);
                                }
                            });
                        }
                        freeConnectionWithDelay(connection);
                    }

                    private void freeConnectionWithDelay(final ChildProcessConnection connection) {
                        // Freeing a service should be delayed. This is so that we avoid immediately
                        // reusing the freed service (see http://crbug.com/164069): the framework
                        // might keep a service process alive when it's been unbound for a short
                        // time. If a new connection to the same service is bound at that point, the
                        // process is reused and bad things happen (mostly static variables are set
                        // when we don't expect them to).
                        mLauncherHandler.postDelayed(new Runnable() {
                            @Override
                            public void run() {
                                free(connection);
                            }
                        }, FREE_CONNECTION_DELAY_MILLIS);
                    }
                };
        return doAllocate(context, serviceBundle, serviceCallbackWrapper);
    }

    /** Free connection allocated by this allocator. */
    private void free(ChildProcessConnection connection) {
        assert isRunningOnLauncherThread();
        doFree(connection);

        if (mPendingAllocations.isEmpty()) return;
        // A slot just opened: run the oldest pending allocation now, and signal
        // the owner when more allocations are still waiting for slots.
        mPendingAllocations.remove().run();
        if (!mPendingAllocations.isEmpty() && mFreeSlotCallback != null) {
            mFreeSlotCallback.run();
        }
    }

    // Queues a runnable to execute when a slot frees up; mFreeSlotCallback is
    // invoked only on the empty -> non-empty transition.
    public final void queueAllocation(Runnable runnable) {
        assert isRunningOnLauncherThread();
        boolean wasEmpty = mPendingAllocations.isEmpty();
        mPendingAllocations.add(runnable);
        if (wasEmpty && mFreeSlotCallback != null) mFreeSlotCallback.run();
    }

    /** May return -1 if size is not fixed. */
    public abstract int getNumberOfServices();

    @VisibleForTesting
    public abstract boolean anyConnectionAllocated();

    /** @return the count of connections managed by the allocator */
    @VisibleForTesting
    public abstract int allocatedConnectionsCountForTesting();

    @VisibleForTesting
    public void setConnectionFactoryForTesting(ConnectionFactory connectionFactory) {
        mConnectionFactory = connectionFactory;
    }

    private boolean isRunningOnLauncherThread() {
        return mLauncherHandler.getLooper() == Looper.myLooper();
    }

    // Subclass hooks implementing the actual allocation strategy.
    /* package */ abstract ChildProcessConnection doAllocate(Context context, Bundle serviceBundle,
            ChildProcessConnection.ServiceCallback serviceCallback);
    /* package */ abstract void doFree(ChildProcessConnection connection);

    /** Implementation class accessed directly by tests. */
    @VisibleForTesting
    public static class FixedSizeAllocatorImpl extends ChildConnectionAllocator {
        // Connections to services. Indices of the array correspond to the service numbers.
        private final ChildProcessConnection[] mChildProcessConnections;

        // The list of free (not bound) service indices.
        private final ArrayList<Integer> mFreeConnectionIndices;

        private FixedSizeAllocatorImpl(Handler launcherHandler, Runnable freeSlotCallback,
                String packageName, String serviceClassName, boolean bindToCaller,
                boolean bindAsExternalService, boolean useStrongBinding, int numChildServices) {
            super(launcherHandler, freeSlotCallback, packageName, serviceClassName, bindToCaller,
                    bindAsExternalService, useStrongBinding);

            mChildProcessConnections = new ChildProcessConnection[numChildServices];

            // Initially every numbered slot is free.
            mFreeConnectionIndices = new ArrayList<Integer>(numChildServices);
            for (int i = 0; i < numChildServices; i++) {
                mFreeConnectionIndices.add(i);
            }
        }

        // Binds the next free numbered service slot; the service name is the
        // class name with the slot index appended.
        @Override
        /* package */ ChildProcessConnection doAllocate(Context context, Bundle serviceBundle,
                ChildProcessConnection.ServiceCallback serviceCallback) {
            if (mFreeConnectionIndices.isEmpty()) {
                Log.d(TAG, "Ran out of services to allocate.");
                return null;
            }
            int slot = mFreeConnectionIndices.remove(0);
            assert mChildProcessConnections[slot] == null;
            ComponentName serviceName = new ComponentName(mPackageName, mServiceClassName + slot);

            ChildProcessConnection connection = mConnectionFactory.createConnection(context,
                    serviceName, mBindToCaller, mBindAsExternalService, serviceBundle,
                    null /* instanceName */);
            mChildProcessConnections[slot] = connection;
            Log.d(TAG, "Allocator allocated and bound a connection, name: %s, slot: %d",
                    mServiceClassName, slot);
            connection.start(mUseStrongBinding, serviceCallback);
            return connection;
        }

        @Override
        /* package */ void doFree(ChildProcessConnection connection) {
            // mChildProcessConnections is relatively short (40 items at max at this point).
            // We are better off iterating than caching in a map.
            int slot = Arrays.asList(mChildProcessConnections).indexOf(connection);
            if (slot == -1) {
                Log.e(TAG, "Unable to find connection to free.");
                assert false;
            } else {
                mChildProcessConnections[slot] = null;
                assert !mFreeConnectionIndices.contains(slot);
                mFreeConnectionIndices.add(slot);
                Log.d(TAG, "Allocator freed a connection, name: %s, slot: %d", mServiceClassName,
                        slot);
            }
        }

        @VisibleForTesting
        public boolean isFreeConnectionAvailable() {
            return !mFreeConnectionIndices.isEmpty();
        }

        @Override
        public int getNumberOfServices() {
            return mChildProcessConnections.length;
        }

        @Override
        public int allocatedConnectionsCountForTesting() {
            return mChildProcessConnections.length - mFreeConnectionIndices.size();
        }

        @VisibleForTesting
        public ChildProcessConnection getChildProcessConnectionAtSlotForTesting(int slotNumber) {
            return mChildProcessConnections[slotNumber];
        }

        @Override
        public boolean anyConnectionAllocated() {
            return mFreeConnectionIndices.size() < mChildProcessConnections.length;
        }
    }

    @VisibleForTesting
    /* package */ static class VariableSizeAllocatorImpl extends ChildConnectionAllocator {
        private final int mMaxAllocated;
        private final ArraySet<ChildProcessConnection> mAllocatedConnections = new ArraySet<>();
        // Monotonically increasing counter used to derive unique instance names.
        private int mNextInstance;

        // Note |serviceClassName| includes the service suffix.
        private VariableSizeAllocatorImpl(Handler launcherHandler, Runnable freeSlotCallback,
                String packageName, String serviceClassName, boolean bindToCaller,
                boolean bindAsExternalService, boolean useStrongBinding, int maxAllocated) {
            super(launcherHandler, freeSlotCallback, packageName, serviceClassName, bindToCaller,
                    bindAsExternalService, useStrongBinding);
            assert maxAllocated > 0;
            mMaxAllocated = maxAllocated;
        }

        @Override
        /* package */ ChildProcessConnection doAllocate(Context context, Bundle serviceBundle,
                ChildProcessConnection.ServiceCallback serviceCallback) {
            ChildProcessConnection connection = allocate(context, serviceBundle);
            if (connection == null) return null;
            mAllocatedConnections.add(connection);
            connection.start(mUseStrongBinding, serviceCallback);
            return connection;
        }

        // Like doAllocate(), but uses tryStart() and only records the connection
        // when the start actually succeeded.
        /* package */ ChildProcessConnection tryAllocate(Context context, Bundle serviceBundle,
                ChildProcessConnection.ServiceCallback serviceCallback) {
            ChildProcessConnection connection = allocate(context, serviceBundle);
            if (connection == null) return null;
            boolean startResult = connection.tryStart(mUseStrongBinding, serviceCallback);
            if (!startResult) return null;
            mAllocatedConnections.add(connection);
            return connection;
        }

        private ChildProcessConnection allocate(Context context, Bundle serviceBundle) {
            if (mAllocatedConnections.size() >= mMaxAllocated) {
                Log.d(TAG, "Ran out of UIDs to allocate.");
                return null;
            }
            ComponentName serviceName = new ComponentName(mPackageName, mServiceClassName);
            String instanceName = Integer.toString(mNextInstance);
            mNextInstance++;
            ChildProcessConnection connection = mConnectionFactory.createConnection(context,
                    serviceName, mBindToCaller, mBindAsExternalService, serviceBundle,
                    instanceName);
            assert connection != null;
            return connection;
        }

        @Override
        /* package */ void doFree(ChildProcessConnection connection) {
            boolean result = mAllocatedConnections.remove(connection);
            assert result;
        }

        /* package */ boolean wasConnectionAllocated(ChildProcessConnection connection)
{ return mAllocatedConnections.contains(connection); } @Override public int getNumberOfServices() { return -1; } @Override public int allocatedConnectionsCountForTesting() { return mAllocatedConnections.size(); } @Override public boolean anyConnectionAllocated() { return mAllocatedConnections.size() > 0; } } /** * Workaround allocator for Android 10 bug. * Android 10 has a bug that UID used for non-primary user cannot be freed correctly, * eventually exhausting the pool of UIDs for isolated services. There is a global pool of * 1000 UIDs, and each app zygote has a smaller pool of 100; the bug appplies to both cases. * The leaked UID in the app zygote pool are released when the zygote is killed; leaked UIDs in * the global pool are released when the device is rebooted. So way to slightly delay until the * device needs to be rebooted is to use up the app zygote pool first before using the * non-zygote global pool. */ private static class Android10WorkaroundAllocatorImpl extends ChildConnectionAllocator { private final VariableSizeAllocatorImpl mZygoteAllocator; private final VariableSizeAllocatorImpl mNonZygoteAllocator; private Android10WorkaroundAllocatorImpl(Handler launcherHandler, Runnable freeSlotCallback, String packageName, String serviceClassName, boolean bindToCaller, boolean bindAsExternalService, boolean useStrongBinding, int maxAllocated) { super(launcherHandler, freeSlotCallback, packageName, serviceClassName, bindToCaller, bindAsExternalService, useStrongBinding); mZygoteAllocator = new VariableSizeAllocatorImpl(launcherHandler, freeSlotCallback, packageName, serviceClassName + ZYGOTE_SUFFIX, bindToCaller, bindAsExternalService, useStrongBinding, maxAllocated); mNonZygoteAllocator = new VariableSizeAllocatorImpl(launcherHandler, freeSlotCallback, packageName, serviceClassName + NON_ZYGOTE_SUFFIX, bindToCaller, bindAsExternalService, useStrongBinding, maxAllocated); } @Override /* package */ ChildProcessConnection doAllocate(Context context, Bundle 
serviceBundle, ChildProcessConnection.ServiceCallback serviceCallback) { ChildProcessConnection connection = mZygoteAllocator.tryAllocate(context, serviceBundle, serviceCallback); if (connection != null) return connection; return mNonZygoteAllocator.doAllocate(context, serviceBundle, serviceCallback); } @Override /* package */ void doFree(ChildProcessConnection connection) { if (mZygoteAllocator.wasConnectionAllocated(connection)) { mZygoteAllocator.doFree(connection); } else if (mNonZygoteAllocator.wasConnectionAllocated(connection)) { mNonZygoteAllocator.doFree(connection); } else { assert false; } } @Override public int getNumberOfServices() { return -1; } @Override public int allocatedConnectionsCountForTesting() { return mZygoteAllocator.allocatedConnectionsCountForTesting() + mNonZygoteAllocator.allocatedConnectionsCountForTesting(); } @Override public boolean anyConnectionAllocated() { return mZygoteAllocator.anyConnectionAllocated() || mNonZygoteAllocator.anyConnectionAllocated(); } @Override public void setConnectionFactoryForTesting(ConnectionFactory connectionFactory) { super.setConnectionFactoryForTesting(connectionFactory); mZygoteAllocator.setConnectionFactoryForTesting(connectionFactory); mNonZygoteAllocator.setConnectionFactoryForTesting(connectionFactory); } } }
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: http://lwjgl.org/license.php
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.opengl;

import java.nio.*;

import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
import static org.lwjgl.system.MemoryUtil.*;

/**
 * The core OpenGL 2.1 functionality. OpenGL 2.1 implementations must support at least revision 1.20 of the OpenGL Shading Language.
 *
 * <p>Extensions promoted to core in this release:</p>
 *
 * <ul>
 * <li><a href="http://www.opengl.org/registry/specs/ARB/pixel_buffer_object.txt">ARB_pixel_buffer_object</a></li>
 * <li><a href="http://www.opengl.org/registry/specs/EXT/texture_sRGB.txt">EXT_texture_sRGB</a></li>
 * </ul>
 *
 * <p>NOTE(review): matrix counts are now uniformly computed as an explicit division by the
 * matrix element count (6, 8 or 12) instead of mixing {@code / 6} with {@code >> 3}; the
 * results are identical for the non-negative buffer/array sizes involved. Since this file is
 * machine generated, the same change should be applied to the generator template.</p>
 */
public class GL21 {

	/** Accepted by the {@code pname} parameter of GetBooleanv, GetIntegerv, GetFloatv, and GetDoublev. */
	public static final int GL_CURRENT_RASTER_SECONDARY_COLOR = 0x845F;

	/** Returned by the {@code type} parameter of GetActiveUniform. */
	public static final int
		GL_FLOAT_MAT2x3 = 0x8B65,
		GL_FLOAT_MAT2x4 = 0x8B66,
		GL_FLOAT_MAT3x2 = 0x8B67,
		GL_FLOAT_MAT3x4 = 0x8B68,
		GL_FLOAT_MAT4x2 = 0x8B69,
		GL_FLOAT_MAT4x3 = 0x8B6A;

	/**
	 * Accepted by the {@code target} parameters of BindBuffer, BufferData, BufferSubData, MapBuffer, UnmapBuffer, GetBufferSubData, GetBufferParameteriv, and
	 * GetBufferPointerv.
	 */
	public static final int
		GL_PIXEL_PACK_BUFFER   = 0x88EB,
		GL_PIXEL_UNPACK_BUFFER = 0x88EC;

	/** Accepted by the {@code pname} parameter of GetBooleanv, GetIntegerv, GetFloatv, and GetDoublev. */
	public static final int
		GL_PIXEL_PACK_BUFFER_BINDING   = 0x88ED,
		GL_PIXEL_UNPACK_BUFFER_BINDING = 0x88EF;

	/** Accepted by the {@code internalformat} parameter of TexImage1D, TexImage2D, TexImage3D, CopyTexImage1D, CopyTexImage2D. */
	public static final int
		GL_SRGB                        = 0x8C40,
		GL_SRGB8                       = 0x8C41,
		GL_SRGB_ALPHA                  = 0x8C42,
		GL_SRGB8_ALPHA8                = 0x8C43,
		GL_SLUMINANCE_ALPHA            = 0x8C44,
		GL_SLUMINANCE8_ALPHA8          = 0x8C45,
		GL_SLUMINANCE                  = 0x8C46,
		GL_SLUMINANCE8                 = 0x8C47,
		GL_COMPRESSED_SRGB             = 0x8C48,
		GL_COMPRESSED_SRGB_ALPHA       = 0x8C49,
		GL_COMPRESSED_SLUMINANCE       = 0x8C4A,
		GL_COMPRESSED_SLUMINANCE_ALPHA = 0x8C4B;

	protected GL21() {
		throw new UnsupportedOperationException();
	}

	static boolean isAvailable(GLCapabilities caps) {
		return checkFunctions(
			caps.glUniformMatrix2x3fv, caps.glUniformMatrix3x2fv, caps.glUniformMatrix2x4fv, caps.glUniformMatrix4x2fv, caps.glUniformMatrix3x4fv,
			caps.glUniformMatrix4x3fv
		);
	}

	// --- [ glUniformMatrix2x3fv ] ---

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix2x3.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat2x3 uniform variable or a mat2x3 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param count     the number of matrices that are to be modified. This should be 1 if the targeted uniform variable is not an array of matrices, and 1 or more if it is an array of matrices.
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void nglUniformMatrix2x3fv(int location, int count, boolean transpose, long value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix2x3fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, count, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix2x3.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat2x3 uniform variable or a mat2x3 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void glUniformMatrix2x3fv(int location, boolean transpose, FloatBuffer value) {
		// a mat2x3 has 6 float elements
		nglUniformMatrix2x3fv(location, value.remaining() / 6, transpose, memAddress(value));
	}

	// --- [ glUniformMatrix3x2fv ] ---

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix3x2.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat3x2 uniform variable or a mat3x2 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param count     the number of matrices that are to be modified. This should be 1 if the targeted uniform variable is not an array of matrices, and 1 or more if it is an array of matrices.
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void nglUniformMatrix3x2fv(int location, int count, boolean transpose, long value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix3x2fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, count, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix3x2.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat3x2 uniform variable or a mat3x2 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void glUniformMatrix3x2fv(int location, boolean transpose, FloatBuffer value) {
		// a mat3x2 has 6 float elements
		nglUniformMatrix3x2fv(location, value.remaining() / 6, transpose, memAddress(value));
	}

	// --- [ glUniformMatrix2x4fv ] ---

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix2x4.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat2x4 uniform variable or a mat2x4 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param count     the number of matrices that are to be modified. This should be 1 if the targeted uniform variable is not an array of matrices, and 1 or more if it is an array of matrices.
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void nglUniformMatrix2x4fv(int location, int count, boolean transpose, long value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix2x4fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, count, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix2x4.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat2x4 uniform variable or a mat2x4 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void glUniformMatrix2x4fv(int location, boolean transpose, FloatBuffer value) {
		// a mat2x4 has 8 float elements (was `>> 3`; normalized for consistency)
		nglUniformMatrix2x4fv(location, value.remaining() / 8, transpose, memAddress(value));
	}

	// --- [ glUniformMatrix4x2fv ] ---

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix4x2.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat4x2 uniform variable or a mat4x2 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param count     the number of matrices that are to be modified. This should be 1 if the targeted uniform variable is not an array of matrices, and 1 or more if it is an array of matrices.
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void nglUniformMatrix4x2fv(int location, int count, boolean transpose, long value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix4x2fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, count, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix4x2.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat4x2 uniform variable or a mat4x2 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void glUniformMatrix4x2fv(int location, boolean transpose, FloatBuffer value) {
		// a mat4x2 has 8 float elements (was `>> 3`; normalized for consistency)
		nglUniformMatrix4x2fv(location, value.remaining() / 8, transpose, memAddress(value));
	}

	// --- [ glUniformMatrix3x4fv ] ---

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix3x4.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat3x4 uniform variable or a mat3x4 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param count     the number of matrices that are to be modified. This should be 1 if the targeted uniform variable is not an array of matrices, and 1 or more if it is an array of matrices.
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void nglUniformMatrix3x4fv(int location, int count, boolean transpose, long value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix3x4fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, count, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix3x4.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat3x4 uniform variable or a mat3x4 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void glUniformMatrix3x4fv(int location, boolean transpose, FloatBuffer value) {
		// a mat3x4 has 12 float elements
		nglUniformMatrix3x4fv(location, value.remaining() / 12, transpose, memAddress(value));
	}

	// --- [ glUniformMatrix4x3fv ] ---

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix4x3.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat4x3 uniform variable or a mat4x3 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param count     the number of matrices that are to be modified. This should be 1 if the targeted uniform variable is not an array of matrices, and 1 or more if it is an array of matrices.
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void nglUniformMatrix4x3fv(int location, int count, boolean transpose, long value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix4x3fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, count, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix4x3.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Specifies the value of a single mat4x3 uniform variable or a mat4x3 uniform variable array for the current program object.
	 *
	 * @param location  the location of the uniform variable to be modified
	 * @param transpose whether to transpose the matrix as the values are loaded into the uniform variable
	 * @param value     a pointer to an array of {@code count} values that will be used to update the specified uniform variable
	 */
	public static void glUniformMatrix4x3fv(int location, boolean transpose, FloatBuffer value) {
		// a mat4x3 has 12 float elements
		nglUniformMatrix4x3fv(location, value.remaining() / 12, transpose, memAddress(value));
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix2x3.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Array version of: {@link #glUniformMatrix2x3fv UniformMatrix2x3fv}
	 */
	public static void glUniformMatrix2x3fv(int location, boolean transpose, float[] value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix2x3fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, value.length / 6, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix3x2.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Array version of: {@link #glUniformMatrix3x2fv UniformMatrix3x2fv}
	 */
	public static void glUniformMatrix3x2fv(int location, boolean transpose, float[] value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix3x2fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, value.length / 6, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix2x4.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Array version of: {@link #glUniformMatrix2x4fv UniformMatrix2x4fv}
	 */
	public static void glUniformMatrix2x4fv(int location, boolean transpose, float[] value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix2x4fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		// was `>> 3`; normalized for consistency with the other matrix sizes
		callPV(__functionAddress, location, value.length / 8, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix4x2.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Array version of: {@link #glUniformMatrix4x2fv UniformMatrix4x2fv}
	 */
	public static void glUniformMatrix4x2fv(int location, boolean transpose, float[] value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix4x2fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		// was `>> 3`; normalized for consistency with the other matrix sizes
		callPV(__functionAddress, location, value.length / 8, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix3x4.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Array version of: {@link #glUniformMatrix3x4fv UniformMatrix3x4fv}
	 */
	public static void glUniformMatrix3x4fv(int location, boolean transpose, float[] value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix3x4fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, value.length / 12, transpose, value);
	}

	/**
	 * <p><a href="http://www.opengl.org/sdk/docs/man/html/glUniformMatrix4x3.xhtml">OpenGL SDK Reference</a></p>
	 *
	 * Array version of: {@link #glUniformMatrix4x3fv UniformMatrix4x3fv}
	 */
	public static void glUniformMatrix4x3fv(int location, boolean transpose, float[] value) {
		long __functionAddress = GL.getCapabilities().glUniformMatrix4x3fv;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, location, value.length / 12, transpose, value);
	}

}
package net.herit.iot.onem2m.incse.controller; import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Set; import java.util.Timer; import java.util.TimerTask; import org.bson.Document; import org.json.simple.JSONObject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.sun.org.apache.xml.internal.security.exceptions.Base64DecodingException; import com.sun.org.apache.xml.internal.security.utils.Base64; import net.herit.iot.message.onem2m.OneM2mResponse.RESPONSE_STATUS; import net.herit.iot.message.onem2m.format.Enums.CONTENT_TYPE; import net.herit.iot.message.onem2m.OneM2mRequest; import net.herit.iot.message.onem2m.OneM2mResponse; import net.herit.iot.message.onem2m.OneM2mRequest.OPERATION; import net.herit.iot.message.onem2m.OneM2mRequest.RESOURCE_TYPE; import net.herit.iot.message.onem2m.OneM2mRequest.RESPONSE_TYPE; import net.herit.iot.message.onem2m.OneM2mRequest.RESULT_CONT; import net.herit.iot.onem2m.ae.emul.Constants; import net.herit.iot.onem2m.bind.codec.AbsSerializer; import net.herit.iot.onem2m.bind.http.client.HttpClient; import net.herit.iot.onem2m.core.convertor.ConvertorFactory; import net.herit.iot.onem2m.core.convertor.JSONConvertor; import net.herit.iot.onem2m.core.util.OneM2MException; import net.herit.iot.onem2m.core.util.Utils; import net.herit.iot.onem2m.incse.context.OneM2mContext; import net.herit.iot.onem2m.incse.controller.dm.Tr069DMAdapter; import net.herit.iot.onem2m.incse.controller.dm.Tr069DMAdapter.MOUri; import net.herit.iot.onem2m.incse.controller.dm.Tr069DMController; import net.herit.iot.onem2m.incse.facility.CfgManager; import net.herit.iot.onem2m.incse.facility.OneM2mUtil; import net.herit.iot.onem2m.incse.manager.ResourceManager; import 
net.herit.iot.onem2m.resource.AE; import net.herit.iot.onem2m.resource.AreaNwkDeviceInfo; import net.herit.iot.onem2m.resource.AreaNwkInfo; import net.herit.iot.onem2m.resource.Battery; import net.herit.iot.onem2m.resource.Container; import net.herit.iot.onem2m.resource.ContentInstance; import net.herit.iot.onem2m.resource.DeviceCapability; import net.herit.iot.onem2m.resource.DeviceInfo; import net.herit.iot.onem2m.resource.EventLog; import net.herit.iot.onem2m.resource.Firmware; import net.herit.iot.onem2m.resource.Memory; import net.herit.iot.onem2m.resource.MgmtResource; import net.herit.iot.onem2m.resource.Naming; import net.herit.iot.onem2m.resource.Node; import net.herit.iot.onem2m.resource.Reboot; import net.herit.iot.onem2m.resource.Resource; import net.herit.iot.onem2m.resource.RestCommand; import net.herit.iot.onem2m.resource.RestCommandCI; import net.herit.iot.onem2m.resource.RestCommandResult; import net.herit.iot.onem2m.resource.RestCommandResult2; import net.herit.iot.onem2m.resource.RestSubscription; import net.herit.iot.onem2m.resource.Software; import net.herit.iot.onem2m.resource.Subscription; public class RestCommandController { private final static RestCommandController INSTANCE = new RestCommandController(); private Logger log = LoggerFactory.getLogger(RestCommandController.class); private OneM2mContext context; private HashMap<String, RestCommand> requestMap; private Timer expireTimer; public static RestCommandController getInstance() { return INSTANCE; } public void initialize(OneM2mContext context) { this.context = context; this.requestMap = new HashMap<String, RestCommand>(); expireTimer = new Timer(); expireTimer.schedule(new ExpiredCmdTimer(), CfgManager.getInstance().getCommandExpireTimerInterval()*1000); } public OneM2mResponse processControl(RestCommand command) { try { OneM2mRequest reqMessage = new OneM2mRequest(); String commandId = command.getCommandId(); if (commandId == null || commandId.length() == 0) { commandId = 
OneM2mUtil.createRequestId(); command.setCommandId(commandId); } long hourLater = System.currentTimeMillis() + (1 * 60 * 60 * 1000); // 1 hour settting String expirationTime = new SimpleDateFormat(Naming.DATE_FORMAT).format(new java.util.Date(hourLater)); ContentInstance ci = new ContentInstance(); ci.setContentInfo("application/json:1"); // updated at 2017-05-12 //ci.setContentInfo("application/json:0"); // updated at 2016-09-30 //ci.setContentInfo("text/plain:0"); //ci.setExpirationTime(CfgManager.getInstance().getDefaultExpirationTime()); // added at 2016-12-23 ci.setExpirationTime(expirationTime); // added at 2016-12-28, set by 1-hour-later Document doc = new Document(); doc.append("exec_id", commandId); doc.append("data", command.getContent()); String json = doc.toJson(); //String json = "{ \"exec_id\": \""+commandId+"\", \"data\": \""+command.getContent()+"\"}"; String encodedStr = Base64.encode(json.getBytes()); encodedStr = encodedStr.replaceAll("(\r\n|\n)", ""); ci.setContent(encodedStr); // added in 2017-12-12 //ci.setContent(Base64.encode(json.getBytes())); // blocked at 2017-05-12 //ci.setContent(command.getContent()); // updated at 2016-09-30 AbsSerializer serializer = AbsSerializer.getSerializer(CONTENT_TYPE.JSON); byte[] content = serializer.serialize(ci).getBytes(); reqMessage.setTo(command.getUri()+"/"+command.getCommand()+"/execute"); reqMessage.setFrom("C-AE-Internal"); reqMessage.setContent(content); reqMessage.setOperation(OPERATION.CREATE); reqMessage.setRequestIdentifier(commandId); reqMessage.setResourceType(RESOURCE_TYPE.CONTENT_INST); reqMessage.setContentType(CONTENT_TYPE.RES_JSON); reqMessage.setResultContent(RESULT_CONT.ATTRIBUTE); reqMessage.setResponseType(RESPONSE_TYPE.BLOCK_REQ); try { putCommandToMap(commandId, command); OneM2mResponse response = new ResourceManager(this.context).processEx(reqMessage, false); log.debug("RestCommand Control processsed:"+response.toString()); return response; } catch (OneM2MException ex) { 
log.debug("Exception during process internal request triggered by RestCommand Control :"+reqMessage.toString()); log.debug("Exception:"+ex.toString()); OneM2mResponse response = new OneM2mResponse(); response.setResponseStatusCodeEnum(ex.getResponseStatusCode()); response.setRequest(reqMessage); log.debug("Handled exception", ex); return response; } } catch (Exception e) { log.debug("Handled exception", e); return null; } } public OneM2mResponse processIpeLwm2m(RestCommand command) { try { OneM2mRequest reqMessage = new OneM2mRequest(); String commandId = command.getCommandId(); if (commandId == null || commandId.length() == 0) { commandId = OneM2mUtil.createRequestId(); command.setCommandId(commandId); } long hourLater = System.currentTimeMillis() + (1 * 60 * 60 * 1000); // 1 hour settting String expirationTime = new SimpleDateFormat(Naming.DATE_FORMAT).format(new java.util.Date(hourLater)); ContentInstance ci = new ContentInstance(); ci.setContentInfo(command.getContentInfo()); // updated at 2017-02-08 //ci.setExpirationTime(CfgManager.getInstance().getDefaultExpirationTime()); // added at 2016-12-23 ci.setExpirationTime(expirationTime); // added at 2016-12-28, set by 1-hour-later ci.setContent(command.getContent()); // updated at 2017-02-08 AbsSerializer serializer = AbsSerializer.getSerializer(CONTENT_TYPE.JSON); byte[] content = serializer.serialize(ci).getBytes(); //reqMessage.setTo(command.getUri()+"/"+command.getCommand()+"/write"); // blocked in 2018-01-15 reqMessage.setTo(command.getUri()+"/"+command.getCommand()+"/execute"); // added in 2018-01-15 reqMessage.setFrom("C-AE-Internal"); reqMessage.setContent(content); reqMessage.setOperation(OPERATION.CREATE); reqMessage.setRequestIdentifier(commandId); reqMessage.setResourceType(RESOURCE_TYPE.CONTENT_INST); reqMessage.setContentType(CONTENT_TYPE.RES_JSON); reqMessage.setResultContent(RESULT_CONT.ATTRIBUTE); reqMessage.setResponseType(RESPONSE_TYPE.BLOCK_REQ); try { putCommandToMap(commandId, command); 
OneM2mResponse response = new ResourceManager(this.context).processEx(reqMessage, false); log.debug("RestCommand Control processsed:"+response.toString()); return response; } catch (OneM2MException ex) { log.debug("Exception during process internal request triggered by RestCommand Control :"+reqMessage.toString()); log.debug("Exception:"+ex.toString()); OneM2mResponse response = new OneM2mResponse(); response.setResponseStatusCodeEnum(ex.getResponseStatusCode()); response.setRequest(reqMessage); log.debug("Handled exception", ex); return response; } } catch (Exception e) { log.debug("Handled exception", e); return null; } } public OneM2mResponse processCommand(RestCommand command) { try { OneM2mRequest reqMessage = new OneM2mRequest(); String commandId = command.getCommandId(); if (commandId == null || commandId.length() == 0) { commandId = OneM2mUtil.createRequestId(); command.setCommandId(commandId); } ContentInstance ci = new ContentInstance(); ci.setContentInfo("application/json:1"); Document doc = new Document(); doc.append("exec_id", commandId); doc.append("data", command.getContent()); String json = doc.toJson(); //String json = "{ \"exec_id\": \""+commandId+"\", \"data\": \""+command.getContent()+"\"}"; ci.setContent(Base64.encode(json.getBytes())); AbsSerializer serializer = AbsSerializer.getSerializer(CONTENT_TYPE.JSON); byte[] content = serializer.serialize(ci).getBytes(); reqMessage.setTo(command.getUri()+"/"+command.getCommand()+"/Execute"); reqMessage.setFrom("C-AE-Internal"); reqMessage.setContent(content); reqMessage.setOperation(OPERATION.CREATE); reqMessage.setRequestIdentifier(commandId); reqMessage.setResourceType(RESOURCE_TYPE.CONTENT_INST); reqMessage.setContentType(CONTENT_TYPE.RES_JSON); reqMessage.setResultContent(RESULT_CONT.ATTRIBUTE); reqMessage.setResponseType(RESPONSE_TYPE.BLOCK_REQ); try { putCommandToMap(commandId, command); OneM2mResponse response = new ResourceManager(this.context).processEx(reqMessage, false); 
log.debug("RestCommand processsed:"+response.toString()); return response; } catch (OneM2MException ex) { log.debug("Exception during process internal request triggered by RestCommand:"+reqMessage.toString()); log.debug("Exception:"+ex.toString()); OneM2mResponse response = new OneM2mResponse(); response.setResponseStatusCodeEnum(ex.getResponseStatusCode()); response.setRequest(reqMessage); log.debug("Handled exception", ex); return response; } } catch (Exception e) { log.debug("Handled exception", e); return null; } } private OneM2mContext getContext() { return this.context; } public void processResult(ContentInstance ci) { try { String contentInfo = ci.getContentInfo(); if (contentInfo != null && contentInfo.equals("application/json:1")) { String content = ci.getContent(); String json = new String(Base64.decode(content), StandardCharsets.UTF_8); JSONConvertor<RestCommandCI> cvtr = (JSONConvertor<RestCommandCI>) ConvertorFactory.getJSONConvertor(RestCommandCI.class, null); RestCommandCI resultCi = cvtr.unmarshal(json); RestCommand cmd = getCommandFromMap(resultCi.getExecId(), true); if (cmd != null && !ci.getUri().startsWith(cmd.getUri()+"/action/Result")) { putCommandToMap(cmd.getCommandId(), cmd); cmd = null; } if (cmd != null) { String notiUri = cmd.getNotificationUri(); String code = resultCi.getExecResult(); String commandId = resultCi.getExecId(); String data = resultCi.getData(); RestCommandResult result = new RestCommandResult(); result.setResultCode(code); result.setCommandId(commandId); result.setResult(data); JSONConvertor<RestCommandResult> cvtRcr = (JSONConvertor<RestCommandResult>) ConvertorFactory.getJSONConvertor(RestCommandResult.class, null); String jsonReult = cvtRcr.marshal(result); OneM2mRequest reqMessage = new OneM2mRequest(); reqMessage.setContent(jsonReult.getBytes()); reqMessage.setTo(extractToFromFullUri(notiUri)); reqMessage.setOperation(OPERATION.CREATE); reqMessage.setContentType(CONTENT_TYPE.JSON); //reqMessage.setFrom("SI"); // new 
HttpClient().process(notiUri, reqMessage); HttpClient.getInstance().sendRequest(notiUri, reqMessage); } } } catch (Base64DecodingException e) { log.debug("Handled exception", e); return; } catch (Exception e) { log.debug("Handled exception", e); return; } } public void processResult2(ContentInstance ci) { // added by brianmoon at 2016-09-26 try { String contentInfo = ci.getContentInfo(); if (contentInfo != null && contentInfo.equals("application/json:1")) { String content = ci.getContent(); String json = new String(Base64.decode(content), StandardCharsets.UTF_8); JSONConvertor<RestCommandCI> cvtr = (JSONConvertor<RestCommandCI>) ConvertorFactory.getJSONConvertor(RestCommandCI.class, null); RestCommandCI resultCi = cvtr.unmarshal(json); RestCommand cmd = getCommandFromMap(resultCi.getExecId(), true); // if (cmd != null && !ci.getUri().startsWith(cmd.getUri()+"/Power/Status")) { // blocked in 2017-05-12 if (cmd != null && !ci.getUri().startsWith(cmd.getUri() + "/" + cmd.getCommand() + "/result")) { // added in 2017-06-08 putCommandToMap(cmd.getCommandId(), cmd); cmd = null; } if (cmd != null) { String notiUri = cmd.getNotificationUri(); String code = resultCi.getExecResult(); String commandId = resultCi.getExecId(); String data = resultCi.getData(); RestCommandResult2 result = new RestCommandResult2(); result.setCommandId(commandId); result.setUri(cmd.getUri()); result.setResultCode(code); // added in 2017-05-12 //result.setDeviceStatus(ci.getContent()); // blocked in 2017-05-12 result.setDeviceStatus(data); // added in 2017-05-12 result.setTimestamp(ci.getCreationTime()); JSONConvertor<RestCommandResult2> cvtRcr = (JSONConvertor<RestCommandResult2>) ConvertorFactory.getJSONConvertor(RestCommandResult2.class, null); String jsonReult = cvtRcr.marshal(result); OneM2mRequest reqMessage = new OneM2mRequest(); reqMessage.setContent(jsonReult.getBytes()); reqMessage.setTo(extractToFromFullUri(notiUri)); reqMessage.setOperation(OPERATION.CREATE); 
reqMessage.setContentType(CONTENT_TYPE.JSON); //reqMessage.setFrom("SI"); // new HttpClient().process(notiUri, reqMessage); HttpClient.getInstance().sendRequest(notiUri, reqMessage); } } } catch (Base64DecodingException e) { log.debug("Handled exception", e); return; } catch (Exception e) { log.debug("Handled exception", e); return; } } private void putCommandToMap(String reqId, RestCommand cmd) { if (cmd.getCreateTime() == null) { cmd.setCreateTime(new Timestamp(new Date().getTime())); } synchronized(this) { this.requestMap.put(reqId, cmd); } } private RestCommand getCommandFromMap(String reqId, boolean delete) { synchronized(this) { RestCommand cmd = this.requestMap.get(reqId); if (cmd != null && delete) { requestMap.remove(reqId); } return cmd; } } private String extractToFromFullUri(String fullUri) { String to = "/"; URI url; try { url = new URI(fullUri); String path = url.getPath(); if (path.length() > 1) { int i = fullUri.indexOf(path); to = fullUri.substring(i); } } catch (URISyntaxException e) { log.debug("Handled exception", e); } return to; } private HashMap<String, RestCommand> getRequestMap() { return this.requestMap; } class ExpiredCmdTimer extends TimerTask { public void run() { //log.debug("ExpiredCmdTimer run!!!!"); int expireSecond = CfgManager.getInstance().getCommandTimeout(); Timestamp currentTime = new Timestamp(new Date().getTime()); ArrayList<RestCommand> expiredCmd = new ArrayList<RestCommand>(); synchronized(RestCommandController.getInstance()) { Set<String> keySet = requestMap.keySet(); Iterator<String> it = keySet.iterator(); List<String> expiredReqIds = new ArrayList<String>(); while (it.hasNext()) { String reqId = it.next(); RestCommand cmd = requestMap.get(reqId); Timestamp expireTime = new Timestamp(cmd.getCreateTime().getTime() + expireSecond*1000L); if (currentTime.after(expireTime)) { expiredCmd.add(cmd); } } Iterator<RestCommand> itCmd = expiredCmd.iterator(); while (itCmd.hasNext()) { 
requestMap.remove(itCmd.next().getCommandId()); } } Iterator<RestCommand> it = expiredCmd.iterator(); while (it.hasNext()) { RestCommand cmd = it.next(); try { log.debug("##########################################"); log.debug("Processing expired command: {}", cmd.toString()); Document doc = new Document(); doc.put("_commandId", cmd.getCommand()); doc.put("_resultCode", RESPONSE_STATUS.COMMAND_TIMEOUT.Value()); doc.put("_result", RESPONSE_STATUS.COMMAND_TIMEOUT.toString()); String notiUri = cmd.getNotificationUri(); if(notiUri != null && !notiUri.equals("")) { // updated by brianmoon at 2016-09-26 OneM2mRequest reqMessage = new OneM2mRequest(); reqMessage.setTo(extractToFromFullUri(notiUri)); reqMessage.setOperation(OPERATION.CREATE); reqMessage.setContentType(CONTENT_TYPE.JSON); reqMessage.setContent(doc.toJson().getBytes()); // new HttpClient().process(notiUri, reqMessage); HttpClient.getInstance().sendRequest(notiUri, reqMessage); } } catch (Exception e) { log.debug("Handled exception", e); } } expireTimer.schedule(new ExpiredCmdTimer(), CfgManager.getInstance().getCommandExpireTimerInterval()*1000); } } // added in 2017-08-03 to create onem2m device management resource related with TR-069 public void setParameterValues(String deviceId, HashMap<String, Object> paramMap) { Tr069DMController controller = new Tr069DMController(); controller.setStatus(deviceId, paramMap); } // added in 2017-08-03 to create onem2m device management resource related with TR-069 public OneM2mResponse createContentInstance(HashMap<String, Object> paramMap) { try { String deviceId = paramMap.get(MOUri.DVC_OUI) + "-" + paramMap.get(MOUri.DVC_PRODUCTCLASS) + "-" + paramMap.get(MOUri.DVC_SERIAL); OneM2mResponse response = null; String parentUri = CfgManager.getInstance().getCSEBaseCid() + "/" + CfgManager.getInstance().getCSEBaseName() + "/" + "AE_" + deviceId; ContentInstance conInstance = null; if(paramMap.get(MOUri.DVC_TEMP_STATUS) != null) { parentUri = parentUri + "/" + 
this.getLastString(MOUri.DVC_TEMP_STATUS) + "/status"; conInstance = new ContentInstance(); conInstance.setContentInfo("application/json:1"); JSONObject json = new JSONObject(); json.put("deviceId", deviceId); json.put("val", paramMap.get(MOUri.DVC_TEMP_STATUS)); conInstance.setContent( Base64.encode(json.toString().getBytes()) ); response = createRegularResource(parentUri, conInstance, RESOURCE_TYPE.CONTENT_INST); } if(paramMap.get(MOUri.DVC_HUMIDITY_STATUS) != null) { parentUri = parentUri + "/" + this.getLastString(MOUri.DVC_HUMIDITY_STATUS) + "/status"; conInstance = new ContentInstance(); conInstance.setContentInfo("application/json:1"); JSONObject json = new JSONObject(); json.put("deviceId", deviceId); json.put("val", paramMap.get(MOUri.DVC_HUMIDITY_STATUS)); conInstance.setContent( Base64.encode(json.toString().getBytes()) ); response = createRegularResource(parentUri, conInstance, RESOURCE_TYPE.CONTENT_INST); } if(paramMap.get(MOUri.DVC_LED_STATUS) != null && !paramMap.get(MOUri.DVC_LED_STATUS).equals("")) { parentUri = parentUri + "/" + this.getLastString(MOUri.DVC_LED_STATUS) + "/result"; conInstance = new ContentInstance(); conInstance.setContentInfo("application/json:1"); JSONObject json = new JSONObject(); json.put("deviceId", deviceId); json.put("val", paramMap.get(MOUri.DVC_LED_STATUS)); conInstance.setContent( Base64.encode(json.toString().getBytes()) ); response = createRegularResource(parentUri, conInstance, RESOURCE_TYPE.CONTENT_INST); } return response; } catch (Exception e) { log.debug("Handled exception", e); return null; } } // added in 2017-08-03 to create onem2m device management resource related with TR-069 public OneM2mResponse addTr69DMResource(HashMap<String, Object> paramMap) { try { OneM2mRequest reqMessage = new OneM2mRequest(); String deviceId = paramMap.get(MOUri.DVC_OUI) + "-" + paramMap.get(MOUri.DVC_PRODUCTCLASS) + "-" + paramMap.get(MOUri.DVC_SERIAL); Node node = new Node(); node.setNodeID(deviceId); AbsSerializer serializer = 
AbsSerializer.getSerializer(CONTENT_TYPE.JSON); byte[] content = serializer.serialize(node).getBytes(); String targetUri = CfgManager.getInstance().getCSEBaseCid() + "/" + CfgManager.getInstance().getCSEBaseName(); reqMessage.setTo(targetUri); reqMessage.setFrom("C-AE-Internal"); reqMessage.setContent(content); reqMessage.setOperation(OPERATION.CREATE); reqMessage.setRequestIdentifier("REQ_" + deviceId); reqMessage.setResourceType(RESOURCE_TYPE.NODE); reqMessage.setContentType(CONTENT_TYPE.RES_JSON); reqMessage.setResultContent(RESULT_CONT.ATTRIBUTE); reqMessage.setResponseType(RESPONSE_TYPE.BLOCK_REQ); try { OneM2mResponse response = new ResourceManager(this.context).processEx(reqMessage, false); log.debug("addTr69DMResource processsed:"+response.toString()); JSONConvertor<?> jsonCvt = ConvertorFactory.getJSONConvertor(Node.class, Node.SCHEMA_LOCATION); String json = new String(response.getContent()); //json = "{ \"restSubs\" : " + json + "}"; // added in 2016-11-24 to process JSON ROOT, updated in 2017-07-27 from rest to restSubs Node resNode = (Node)jsonCvt.unmarshal(json); String mgmtObjUri = targetUri + "/" + resNode.getResourceName(); // AE AE ae = new AE(); String parentUri = ""; String aeResourceName = "AE_" + deviceId; ae.setResourceName(aeResourceName); ae.setAppID("TR-069_AE_" + deviceId); ae.setRequestReachability(true); ae.setNodeLink(resNode.getResourceID()); ae.addPointOfAccess(CfgManager.getInstance().getPointOfAccess() + "/" + aeResourceName); response = createRegularResource(targetUri, ae, RESOURCE_TYPE.AE); Container container = null; if(paramMap.get(MOUri.DVC_TEMP_STATUS) != null && !paramMap.get(MOUri.DVC_TEMP_STATUS).toString().equals("")) { // Container Temperature container = new Container(); container.setResourceName(this.getLastString(MOUri.DVC_TEMP_STATUS)); parentUri = targetUri + "/" + aeResourceName; response = createRegularResource(parentUri, container, RESOURCE_TYPE.CONTAINER); // Container Status container = new Container(); 
container.setResourceName("status"); container.setMaxNrOfInstances(1000); container.setMaxInstanceAge(36000); container.setMaxByteSize(1024000); parentUri = targetUri + "/" + aeResourceName + "/" + this.getLastString(MOUri.DVC_TEMP_STATUS); response = createRegularResource(parentUri, container, RESOURCE_TYPE.CONTAINER); } if(paramMap.get(MOUri.DVC_HUMIDITY_STATUS) != null && !paramMap.get(MOUri.DVC_HUMIDITY_STATUS).toString().equals("")) { // Container Humidity container = new Container(); container.setResourceName(this.getLastString(MOUri.DVC_HUMIDITY_STATUS)); parentUri = targetUri + "/" + aeResourceName; response = createRegularResource(parentUri, container, RESOURCE_TYPE.CONTAINER); // Container Status container = new Container(); container.setResourceName("status"); container.setMaxNrOfInstances(1000); container.setMaxInstanceAge(36000); container.setMaxByteSize(1024000); parentUri = targetUri + "/" + aeResourceName + "/" + this.getLastString(MOUri.DVC_HUMIDITY_STATUS);; response = createRegularResource(parentUri, container, RESOURCE_TYPE.CONTAINER); } if(paramMap.get(MOUri.DVC_LED_STATUS) != null && !paramMap.get(MOUri.DVC_LED_STATUS).toString().equals("")) { // Container LED container = new Container(); container.setResourceName(this.getLastString(MOUri.DVC_LED_STATUS)); parentUri = targetUri + "/" + aeResourceName; response = createRegularResource(parentUri, container, RESOURCE_TYPE.CONTAINER); // Container execute container = new Container(); container.setResourceName("execute"); container.setMaxNrOfInstances(1000); container.setMaxInstanceAge(36000); container.setMaxByteSize(1024000); parentUri = targetUri + "/" + aeResourceName + "/" + this.getLastString(MOUri.DVC_LED_STATUS);; response = createRegularResource(parentUri, container, RESOURCE_TYPE.CONTAINER); // Subscription Subscription sub = new Subscription(); String notiUrl = "http://" + CfgManager.getInstance().getHostname() + ":" + CfgManager.getInstance().getRestServerPort() + 
"/dm/tr69/noti/receive"; sub.addNotificationURI(notiUrl); response = createRegularResource(parentUri, sub, RESOURCE_TYPE.SUBSCRIPTION); // Container result container = new Container(); container.setResourceName("result"); container.setMaxNrOfInstances(1000); container.setMaxInstanceAge(36000); container.setMaxByteSize(1024000); parentUri = targetUri + "/" + aeResourceName + "/" + this.getLastString(MOUri.DVC_LED_STATUS);; response = createRegularResource(parentUri, container, RESOURCE_TYPE.CONTAINER); } // deviceInfo DeviceInfo deviceInfo = new DeviceInfo(); deviceInfo.setMgmtDefinition(RESOURCE_TYPE.MGMT_DEVICE_INFO.Value()); deviceInfo.setDeviceLabel(paramMap.get(MOUri.DVC_SERIAL).toString() ); deviceInfo.setManufacturer( paramMap.get(MOUri.DVC_MANUFACTURER).toString() ); deviceInfo.setModel( paramMap.get(MOUri.DVC_MODEL).toString() ); deviceInfo.setDeviceType(paramMap.get(MOUri.DVC_PRODUCTCLASS).toString() ); deviceInfo.setFwVersion( paramMap.get(MOUri.DVC_SW_VERSION).toString() ); deviceInfo.setSwVersion( paramMap.get(MOUri.DVC_SW_VERSION).toString() ); deviceInfo.setHwVersion( paramMap.get(MOUri.DVC_HW_VERSION).toString() ); response = createMgmtObjectResource(mgmtObjUri, deviceInfo, RESOURCE_TYPE.MGMT_DEVICE_INFO); //reoot Reboot reboot = new Reboot(); reboot.setMgmtDefinition(RESOURCE_TYPE.MGMT_REBOOT.Value()); response = createMgmtObjectResource(mgmtObjUri, reboot, RESOURCE_TYPE.MGMT_REBOOT); if(paramMap.get(MOUri.DVC_MEMORY_TOTAL) != null && paramMap.get(MOUri.DVC_MEMORY_TOTAL) != null) { // memory Memory memory = new Memory(); memory.setMgmtDefinition(RESOURCE_TYPE.MGMT_MEMORY.Value()); memory.setMemTotal(Integer.parseInt( paramMap.get(MOUri.DVC_MEMORY_TOTAL).toString() )); memory.setMemAvailable(Integer.parseInt( paramMap.get(MOUri.DVC_MEMORY_FREE).toString() )); response = createMgmtObjectResource(mgmtObjUri, memory, RESOURCE_TYPE.MGMT_MEMORY); } else if(paramMap.get(MOUri.DVC_BATTERY_LEVEL) != null && paramMap.get(MOUri.DVC_BATTERY_STATUS) != null) { 
//battery Battery battery = new Battery(); battery.setMgmtDefinition(RESOURCE_TYPE.MGMT_BATTERY.Value()); battery.setBatteryLevel(Long.parseLong( paramMap.get(MOUri.DVC_BATTERY_LEVEL).toString() )); battery.setBatteryStatus(Integer.parseInt( paramMap.get(MOUri.DVC_BATTERY_STATUS).toString() )); response = createMgmtObjectResource(mgmtObjUri, battery, RESOURCE_TYPE.MGMT_BATTERY); } else if(paramMap.get(MOUri.DVC_EVT_LOG_TYPE_ID) != null && paramMap.get(MOUri.DVC_EVT_LOG_DATA) != null && paramMap.get(MOUri.DVC_EVT_LOG_STATUS) != null) { //eventLog EventLog eventLog = new EventLog(); eventLog.setMgmtDefinition(RESOURCE_TYPE.MGMT_EVENT_LOG.Value()); eventLog.setLogTypeId( Integer.parseInt( paramMap.get(MOUri.DVC_EVT_LOG_TYPE_ID).toString() ) ); eventLog.setLogData( paramMap.get(MOUri.DVC_EVT_LOG_DATA).toString() ); eventLog.setLogStatus( Integer.parseInt( paramMap.get(MOUri.DVC_EVT_LOG_STATUS).toString() ) ); response = createMgmtObjectResource(mgmtObjUri, eventLog, RESOURCE_TYPE.MGMT_EVENT_LOG); } return response; } catch (OneM2MException ex) { log.debug("Exception during process internal request triggered by addTr69DMResource:"+reqMessage.toString()); log.debug("Exception:"+ex.toString()); OneM2mResponse response = new OneM2mResponse(); response.setResponseStatusCodeEnum(ex.getResponseStatusCode()); response.setRequest(reqMessage); log.debug("Handled exception", ex); return response; } } catch (Exception e) { log.debug("Handled exception", e); return null; } } private OneM2mResponse createMgmtObjectResource(String parentUri, MgmtResource mgmtObj, RESOURCE_TYPE resType) { try { OneM2mRequest reqMessage = new OneM2mRequest(); AbsSerializer serializer = AbsSerializer.getSerializer(CONTENT_TYPE.JSON); byte[] content = serializer.serialize(mgmtObj).getBytes(); reqMessage.setTo(parentUri); reqMessage.setFrom("C-AE-Internal"); reqMessage.setContent(content); reqMessage.setOperation(OPERATION.CREATE); reqMessage.setRequestIdentifier("REQ_1234567890"); 
reqMessage.setResourceType(resType); reqMessage.setContentType(CONTENT_TYPE.RES_JSON); reqMessage.setResultContent(RESULT_CONT.ATTRIBUTE); reqMessage.setResponseType(RESPONSE_TYPE.BLOCK_REQ); try { OneM2mResponse response = new ResourceManager(this.context).processEx(reqMessage, false); log.debug("createMgmtObjectResource processsed:"+response.toString()); return response; } catch (OneM2MException ex) { log.debug("Exception during process internal request triggered by addTr69DMResource:"+reqMessage.toString()); log.debug("Exception:"+ex.toString()); OneM2mResponse response = new OneM2mResponse(); response.setResponseStatusCodeEnum(ex.getResponseStatusCode()); response.setRequest(reqMessage); log.debug("Handled exception", ex); return response; } } catch (Exception e) { log.debug("Handled exception", e); return null; } } private OneM2mResponse createRegularResource(String parentUri, Resource resource, RESOURCE_TYPE resType) { try { OneM2mRequest reqMessage = new OneM2mRequest(); AbsSerializer serializer = AbsSerializer.getSerializer(CONTENT_TYPE.JSON); byte[] content = serializer.serialize(resource).getBytes(); String origin = "C-AE-Internal"; if(resType.equals(RESOURCE_TYPE.AE)) { origin = "S"; } reqMessage.setTo(parentUri); reqMessage.setFrom(origin); reqMessage.setContent(content); reqMessage.setOperation(OPERATION.CREATE); reqMessage.setRequestIdentifier("REQ_1234567890"); reqMessage.setResourceType(resType); reqMessage.setContentType(CONTENT_TYPE.RES_JSON); reqMessage.setResultContent(RESULT_CONT.ATTRIBUTE); reqMessage.setResponseType(RESPONSE_TYPE.BLOCK_REQ); try { OneM2mResponse response = new ResourceManager(this.context).processEx(reqMessage, false); log.debug("createMgmtObjectResource processsed:"+response.toString()); return response; } catch (OneM2MException ex) { log.debug("Exception during process internal request triggered by addTr69DMResource:"+reqMessage.toString()); log.debug("Exception:"+ex.toString()); OneM2mResponse response = new 
OneM2mResponse(); response.setResponseStatusCodeEnum(ex.getResponseStatusCode()); response.setRequest(reqMessage); log.debug("Handled exception", ex); return response; } } catch (Exception e) { log.debug("Handled exception", e); return null; } } private String getLastString(String longStr) { int idx = longStr.lastIndexOf("."); return longStr.substring(idx+1); } // test code public static void main(String[] args) throws Exception { Document doc = new Document(); doc.append("exec_id", "cmd_0506"); doc.append("data", "{\"actionType\":\"ringAlarm\",\"user_id\":\"S000001\",\"alarm_id\":\"1\"}"); String json1 = doc.toJson(); String json2 = "{ \"exec_id\": \"commandid\", \"data\": \"{\"actionType\":\"testAlarm\",\"user_id\":\"u00002\",\"alarm_id\":\"1\"}\"}"; String json3 = "{ \"exec_id\": \"commandid\", \"data\": \"{\\\"actionType\\\":\\\"testAlarm\\\",\\\"user_id\\\":\\\"u00002\\\",\\\"alarm_id\\\":\\\"1\\\"}\"}"; System.out.println(json1); System.out.println(Base64.encode(json1.getBytes())); System.out.println(json2); System.out.println(Base64.encode(json2.getBytes())); System.out.println(json3); System.out.println(Base64.encode(json3.getBytes())); } }
package com.vansuita.materialabout.util; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; import android.net.Uri; import android.provider.ContactsContract; import androidx.annotation.NonNull; import android.view.View; import com.vansuita.materialabout.R; /** * Created by jrvansuita on 10/02/17. */ public final class IntentUtil { private Context context; public IntentUtil(Context context) { this.context = context; } public Intent intent(String url) { return intent(uri(url)); } public Uri uri(String url) { return Uri.parse(url); } public Intent intent(Uri uri) { return new Intent(Intent.ACTION_VIEW, uri); } public View.OnClickListener clickUri(Uri uri) { return clickIntent(intent(uri)); } public View.OnClickListener clickIntent(final Intent intent) { return new View.OnClickListener() { @Override public void onClick(View view) { open(intent); } }; } public void open(Intent intent) { try { context.startActivity(intent); } catch (Throwable e) { e.printStackTrace(); } } public void open(Uri uri) { open(intent(uri)); } public Intent openFacebook(String user) { try { tryPackage(R.string.id_facebook_app); return intent(R.string.uri_facebook_app, user); } catch (Exception e) { return intent(R.string.url_facebook_website, user); } } public Intent intent(int res, String user) { return intent(uri(res, user)); } public Uri uri(int res, String user) { return Uri.parse(context.getString(res, user)); } private void tryPackage(int res) throws PackageManager.NameNotFoundException { context.getPackageManager().getPackageInfo(context.getString(res), 0); } public Intent openInstagram(String user) { try { tryPackage(R.string.id_instagram_app); return intent(R.string.uri_instagram_app, user); } catch (Exception e) { return intent(R.string.url_instagram_website, user); } } public Intent openTwitter(String user) { try { tryPackage(R.string.id_twitter_app); return intent(R.string.uri_twitter_app, user); } catch (Exception e) { return 
intent(R.string.url_twitter_website, user); } } public Intent openGooglePlus(String user) { try { tryPackage(R.string.id_google_plus_app); return intent(R.string.uri_google_plus_app, user); } catch (Exception e) { return intent(R.string.url_google_plus_website, user); } } public Intent openGooglePlayDev(String user) { try { return intent(R.string.url_google_play_store_developer_page, user); } catch (Exception e) { return intent(R.string.url_google_play_store_developer_page, user); } } public Intent openYoutubeChannel(String user) { try { return intent(R.string.url_youtube_channel_website, user); } catch (Exception e) { return intent(R.string.url_youtube_channel_website, user); } } public Intent openYoutubeUser(String user) { try { return intent(R.string.url_youtube_user_website, user); } catch (Exception e) { return intent(R.string.url_youtube_user_website, user); } } public Intent openLinkedIn(String user) { try { tryPackage(R.string.id_linkedin_app); return intent(R.string.uri_linkedin_app, user); } catch (Exception e) { return intent(R.string.url_linkedin_website, user); } } public Intent openSkype(String phone) { try { tryPackage(R.string.id_skype_app); return intent(R.string.uri_skype_app, phone); } catch (Exception e) { return intent(R.string.uri_skype_app, phone); } } @NonNull public Intent openAddContact(String name, String phone) { Intent intent = new Intent(Intent.ACTION_INSERT); intent.setType(ContactsContract.Contacts.CONTENT_TYPE); intent.putExtra(ContactsContract.Intents.Insert.NAME, name); intent.putExtra(ContactsContract.Intents.Insert.PHONE, phone); return intent; } @NonNull public Intent sendEmail(String email, String subject, String message) { Intent intent = new Intent(Intent.ACTION_SENDTO); intent.setData(Uri.parse("mailto:")); // only email apps should handle this intent.putExtra(Intent.EXTRA_EMAIL, new String[]{email}); intent.putExtra(Intent.EXTRA_SUBJECT, subject); intent.putExtra(Intent.EXTRA_TEXT, message); return intent; } @NonNull 
public Intent openPlayStoreAppPage(String app) { Intent intent = intent(R.string.uri_play_store_app, app); if (intent.resolveActivity(context.getPackageManager()) != null) { return intent; } else { return intent(R.string.uri_play_store_app_website, app); } } @NonNull public Intent openPlayStoreAppsList(String app) { Intent intent = intent(R.string.uri_play_store_apps_list, app); if (intent.resolveActivity(context.getPackageManager()) != null) { return intent; } else { return intent(R.string.uri_play_store_apps_list_website, app); } } @NonNull public Intent shareThisApp(String subject, String message) { Intent intent = new Intent(Intent.ACTION_SEND); intent.setType("text/plain"); intent.putExtra(Intent.EXTRA_SUBJECT, subject); intent.putExtra(Intent.EXTRA_TEXT, message); return intent; } }
/* * #%L * SymbolElement.java - mongodb-async-driver - Allanbank Consulting, Inc. * %% * Copyright (C) 2011 - 2014 Allanbank Consulting, Inc. * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ package com.allanbank.mongodb.bson.element; import static com.allanbank.mongodb.util.Assertions.assertNotNull; import javax.annotation.concurrent.Immutable; import javax.annotation.concurrent.ThreadSafe; import com.allanbank.mongodb.bson.Element; import com.allanbank.mongodb.bson.ElementType; import com.allanbank.mongodb.bson.Visitor; import com.allanbank.mongodb.bson.io.StringEncoder; /** * A wrapper for a BSON symbol. * * @api.yes This class is part of the driver's API. Public and protected members * will be deprecated for at least 1 non-bugfix release (version * numbers are &lt;major&gt;.&lt;minor&gt;.&lt;bugfix&gt;) before being * removed or modified. * @copyright 2011-2013, Allanbank Consulting, Inc., All Rights Reserved */ @Immutable @ThreadSafe public class SymbolElement extends AbstractElement { /** * The {@link SymbolElement}'s class to avoid the * {@link Class#forName(String) Class.forName(...)} overhead. */ public static final Class<SymbolElement> SYMBOL_CLASS = SymbolElement.class; /** The BSON type for a symbol. */ public static final ElementType TYPE = ElementType.SYMBOL; /** Serialization version for the class. */ private static final long serialVersionUID = -3181997000292958333L; /** * Computes and returns the number of bytes that are used to encode the * element. 
* * @param name * The name for the element. * @param symbol * The BSON symbol value. * @return The size of the element when encoded in bytes. */ private static long computeSize(final String name, final String symbol) { long result = 7; // type (1) + name null byte (1) + // symbol length (4) + symbol null byte (1) result += StringEncoder.utf8Size(name); result += StringEncoder.utf8Size(symbol); return result; } /** The BSON string value. */ private final String mySymbol; /** * Constructs a new {@link SymbolElement}. * * @param name * The name for the BSON string. * @param symbol * The BSON symbol value. * @throws IllegalArgumentException * If the {@code name} or {@code symbol} is <code>null</code>. */ public SymbolElement(final String name, final String symbol) { this(name, symbol, computeSize(name, symbol)); } /** * Constructs a new {@link SymbolElement}. * * @param name * The name for the BSON string. * @param symbol * The BSON symbol value. * @param size * The size of the element when encoded in bytes. If not known * then use the * {@link StringElement#StringElement(String, String)} * constructor instead. * @throws IllegalArgumentException * If the {@code name} or {@code symbol} is <code>null</code>. */ public SymbolElement(final String name, final String symbol, final long size) { super(name, size); assertNotNull(symbol, "Symbol element's symbol cannot be null."); mySymbol = symbol; } /** * Accepts the visitor and calls the {@link Visitor#visitSymbol} method. * * @see Element#accept(Visitor) */ @Override public void accept(final Visitor visitor) { visitor.visitSymbol(getName(), getSymbol()); } /** * {@inheritDoc} * <p> * Overridden to compare the string values if the base class comparison is * equals. * </p> * <p> * Note that for MongoDB {@link SymbolElement} and {@link StringElement} * will return equal based on the type. Care is taken here to make sure that * the values return the same value regardless of comparison order. 
* </p> * <p> * Note: Comparison of strings in MongoDB does not use a collator. This * class emulates the MongoDB behavior and orders the string elements based * on the UTF-8 encoding of the strings. * </p> */ @Override public int compareTo(final Element otherElement) { int result = super.compareTo(otherElement); if (result == 0) { // Might be a StringElement or SymbolElement. final ElementType otherType = otherElement.getType(); if (otherType == ElementType.SYMBOL) { result = StringElement.utf8Compare(mySymbol, ((SymbolElement) otherElement).getSymbol()); } else { result = StringElement.utf8Compare(mySymbol, ((StringElement) otherElement).getValue()); } } return result; } /** * Determines if the passed object is of this same type as this object and * if so that its fields are equal. * * @param object * The object to compare to. * * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(final Object object) { boolean result = false; if (this == object) { result = true; } else if ((object != null) && (getClass() == object.getClass())) { final SymbolElement other = (SymbolElement) object; result = super.equals(object) && nullSafeEquals(mySymbol, other.mySymbol); } return result; } /** * Returns the BSON symbol value. * * @return The BSON symbol value. */ public String getSymbol() { return mySymbol; } /** * {@inheritDoc} */ @Override public ElementType getType() { return TYPE; } /** * {@inheritDoc} * <p> * Returns the {@link String} symbol. * </p> */ @Override public String getValueAsObject() { return getSymbol(); } /** * {@inheritDoc} * <p> * Returns the {@link String} symbol. * </p> */ @Override public String getValueAsString() { return getSymbol(); } /** * Computes a reasonable hash code. * * @return The hash code value. */ @Override public int hashCode() { int result = 1; result = (31 * result) + super.hashCode(); result = (31 * result) + ((mySymbol != null) ? 
mySymbol.hashCode() : 3); return result; } /** * {@inheritDoc} * <p> * Returns a new {@link SymbolElement}. * </p> */ @Override public SymbolElement withName(final String name) { if (getName().equals(name)) { return this; } return new SymbolElement(name, mySymbol); } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.kinesisvideo.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Specifies the condition that streams must satisfy to be returned when you list streams (see the
 * <code>ListStreams</code> API). A condition has a comparison operation and a value. Currently, you can specify only
 * the <code>BEGINS_WITH</code> operator, which finds streams whose names start with a given prefix.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kinesisvideo-2017-09-30/StreamNameCondition" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StreamNameCondition implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * A comparison operator. Currently, you can specify only the <code>BEGINS_WITH</code> operator, which finds streams
     * whose names start with a given prefix.
     * </p>
     */
    private String comparisonOperator;

    /**
     * <p>
     * A value to compare.
     * </p>
     */
    private String comparisonValue;

    /**
     * <p>
     * A comparison operator. Currently, you can specify only the <code>BEGINS_WITH</code> operator, which finds streams
     * whose names start with a given prefix.
     * </p>
     *
     * @param comparisonOperator
     *        A comparison operator. Currently, you can specify only the <code>BEGINS_WITH</code> operator, which finds
     *        streams whose names start with a given prefix.
     * @see ComparisonOperator
     */
    public void setComparisonOperator(String comparisonOperator) {
        this.comparisonOperator = comparisonOperator;
    }

    /**
     * <p>
     * A comparison operator. Currently, you can specify only the <code>BEGINS_WITH</code> operator, which finds streams
     * whose names start with a given prefix.
     * </p>
     *
     * @return A comparison operator. Currently, you can specify only the <code>BEGINS_WITH</code> operator, which finds
     *         streams whose names start with a given prefix.
     * @see ComparisonOperator
     */
    public String getComparisonOperator() {
        return this.comparisonOperator;
    }

    /**
     * <p>
     * A comparison operator. Currently, you can specify only the <code>BEGINS_WITH</code> operator, which finds streams
     * whose names start with a given prefix.
     * </p>
     *
     * @param comparisonOperator
     *        A comparison operator. Currently, you can specify only the <code>BEGINS_WITH</code> operator, which finds
     *        streams whose names start with a given prefix.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ComparisonOperator
     */
    public StreamNameCondition withComparisonOperator(String comparisonOperator) {
        this.comparisonOperator = comparisonOperator;
        return this;
    }

    /**
     * <p>
     * A comparison operator. Currently, you can specify only the <code>BEGINS_WITH</code> operator, which finds streams
     * whose names start with a given prefix.
     * </p>
     *
     * @param comparisonOperator
     *        A comparison operator. Currently, you can specify only the <code>BEGINS_WITH</code> operator, which finds
     *        streams whose names start with a given prefix.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ComparisonOperator
     */
    public StreamNameCondition withComparisonOperator(ComparisonOperator comparisonOperator) {
        setComparisonOperator(comparisonOperator.toString());
        return this;
    }

    /**
     * <p>
     * A value to compare.
     * </p>
     *
     * @param comparisonValue
     *        A value to compare.
     */
    public void setComparisonValue(String comparisonValue) {
        this.comparisonValue = comparisonValue;
    }

    /**
     * <p>
     * A value to compare.
     * </p>
     *
     * @return A value to compare.
     */
    public String getComparisonValue() {
        return this.comparisonValue;
    }

    /**
     * <p>
     * A value to compare.
     * </p>
     *
     * @param comparisonValue
     *        A value to compare.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public StreamNameCondition withComparisonValue(String comparisonValue) {
        this.comparisonValue = comparisonValue;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Output format is byte-identical to the generated SDK form.
        final StringBuilder sb = new StringBuilder("{");
        if (getComparisonOperator() != null) {
            sb.append("ComparisonOperator: ").append(getComparisonOperator()).append(",");
        }
        if (getComparisonValue() != null) {
            sb.append("ComparisonValue: ").append(getComparisonValue());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof StreamNameCondition)) {
            return false;
        }
        StreamNameCondition other = (StreamNameCondition) obj;
        return java.util.Objects.equals(other.getComparisonOperator(), this.getComparisonOperator())
                && java.util.Objects.equals(other.getComparisonValue(), this.getComparisonValue());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based accumulation as the
        // generated field-by-field version, so values are unchanged.
        return java.util.Objects.hash(getComparisonOperator(), getComparisonValue());
    }

    @Override
    public StreamNameCondition clone() {
        try {
            return (StreamNameCondition) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.kinesisvideo.model.transform.StreamNameConditionMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Copyright (c) 2016 Open Baton (http://www.openbaton.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openbaton.integration.test; import java.io.IOException; import java.net.URL; import java.util.*; import java.util.concurrent.TimeUnit; import org.openbaton.catalogue.mano.descriptor.NetworkServiceDescriptor; import org.openbaton.catalogue.mano.descriptor.VirtualNetworkFunctionDescriptor; import org.openbaton.catalogue.mano.record.NetworkServiceRecord; import org.openbaton.catalogue.nfvo.VNFPackage; import org.openbaton.catalogue.nfvo.viminstances.BaseVimInstance; import org.openbaton.integration.test.utils.Utils; import org.openbaton.sdk.NFVORequestor; import org.openbaton.sdk.NfvoRequestorBuilder; import org.openbaton.sdk.api.exception.SDKException; import org.openbaton.sdk.api.rest.NetworkServiceDescriptorAgent; import org.openbaton.sdk.api.rest.NetworkServiceRecordAgent; import org.openbaton.sdk.api.rest.VNFPackageAgent; import org.openbaton.sdk.api.rest.VimInstanceAgent; import org.openbaton.sdk.api.rest.VirtualNetworkFunctionDescriptorAgent; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class MainIntegrationTest { private static final String EXTERNAL_PATH = "/etc/openbaton/integration-tests/"; private static final String PROPERTIES_FILE = "integration-tests.properties"; private static final String SCENARIOS_PATH = "integration-test-scenarios/"; private static final String NSD_PATH = "network-service-descriptors/"; private static final 
String VIM_PATH = "vim-instances/"; private static final String SCRIPTS_PATH = "scripts/"; private static final String VNF_PACKAGES_PATH = "vnf-packages/"; private static final Logger log = LoggerFactory.getLogger(MainIntegrationTest.class); private static String nfvoIp; private static int nfvoPort; private static String nfvoUsr; private static String nfvoPwd; private static String projectName; private static boolean sslEnabled; private static boolean clearAfterTest = false; /** * Load properties from configuration file * * @return The {@code Properties} object filled * @throws IOException In case of IOException while opening the properties file */ private static Properties loadProperties() throws IOException { String propertiesFile; // Checking whether external properties file exists if (Utils.checkFileExists(EXTERNAL_PATH + PROPERTIES_FILE)) { propertiesFile = EXTERNAL_PATH + PROPERTIES_FILE; } else { propertiesFile = PROPERTIES_FILE; } log.debug("Using properties file: " + propertiesFile); Properties properties = Utils.getProperties(propertiesFile); properties.setProperty("nfvo-ip", properties.getProperty("nfvo-ip", "localhost")); properties.setProperty("nfvo-port", properties.getProperty("nfvo-port", "8080")); properties.setProperty("nfvo-usr", properties.getProperty("nfvo-usr", "admin")); properties.setProperty("nfvo-pwd", properties.getProperty("nfvo-pwd", "openbaton")); properties.setProperty( "nfvo-project-name", properties.getProperty("nfvo-project-name", "default")); properties.setProperty("nfvo-ssl-enabled", properties.getProperty("nfvo-ssl-enabled", "false")); properties.setProperty("local-ip", properties.getProperty("local-ip", "localhost")); properties.setProperty("rest-waiter-port", properties.getProperty("rest-waiter-port", "8181")); properties.setProperty("clear-after-test", properties.getProperty("clear-after-test", "true")); nfvoIp = properties.getProperty("nfvo-ip"); nfvoPort = Integer.parseInt(properties.getProperty("nfvo-port")); nfvoUsr = 
properties.getProperty("nfvo-usr"); nfvoPwd = properties.getProperty("nfvo-pwd"); projectName = properties.getProperty("nfvo-project-name"); sslEnabled = Boolean.parseBoolean(properties.getProperty("nfvo-ssl-enabled")); clearAfterTest = Boolean.parseBoolean(properties.getProperty("clear-after-test")); // default folders where scenario, nsd, and script files are placed if (!Utils.checkFileExists(properties.getProperty("integration-test-scenarios"))) properties.setProperty("integration-test-scenarios", SCENARIOS_PATH); if (!Utils.checkFileExists(properties.getProperty("nsd-path"))) properties.setProperty("nsd-path", NSD_PATH); if (!Utils.checkFileExists(properties.getProperty("vim-path"))) properties.setProperty("vim-path", VIM_PATH); if (!Utils.checkFileExists(properties.getProperty("scripts-path"))) properties.setProperty("scripts-path", SCRIPTS_PATH); if (!Utils.checkFileExists(properties.getProperty("vnf-packages-path"))) properties.setProperty("vnf-packages-path", VNF_PACKAGES_PATH); return properties; } /** * This method tries to remove every NSD, VNFD, VNFPackage and VIM from the orchestrator. 
* * @param requestor The {@code NFVORequestor} to use */ private static void clearOrchestrator(NFVORequestor requestor) { try { NetworkServiceRecordAgent nsrAgent = requestor.getNetworkServiceRecordAgent(); List<NetworkServiceRecord> nsrList = nsrAgent.findAll(); for (NetworkServiceRecord nsr : nsrList) { try { nsrAgent.delete(nsr.getId()); } catch (SDKException se) { log.error("Could not remove NSR " + nsr.getName() + " with ID " + nsr.getId()); } } Thread.sleep(1000); NetworkServiceDescriptorAgent nsdAgent = requestor.getNetworkServiceDescriptorAgent(); List<NetworkServiceDescriptor> nsdList = nsdAgent.findAll(); for (NetworkServiceDescriptor nsd : nsdList) { try { nsdAgent.delete(nsd.getId()); } catch (SDKException se) { log.error("Could not remove NSD " + nsd.getName() + " with ID " + nsd.getId()); } } Thread.sleep(1000); VirtualNetworkFunctionDescriptorAgent vnfdAgent = requestor.getVirtualNetworkFunctionDescriptorAgent(); List<VirtualNetworkFunctionDescriptor> vnfdList = vnfdAgent.findAll(); for (VirtualNetworkFunctionDescriptor vnfd : vnfdList) { vnfdAgent.delete(vnfd.getId()); } Thread.sleep(1000); VNFPackageAgent packageAgent = requestor.getVNFPackageAgent(); List<VNFPackage> packageList = packageAgent.findAll(); for (VNFPackage p : packageList) { try { packageAgent.delete(p.getId()); } catch (SDKException se) { log.error("Could not remove VNFPackage " + p.getName() + " with ID " + p.getId()); } } VimInstanceAgent vimAgent = requestor.getVimInstanceAgent(); List<BaseVimInstance> vimList = vimAgent.findAll(); for (BaseVimInstance vim : vimList) { try { vimAgent.delete(vim.getId()); } catch (SDKException se) { log.error("Could not remove VIM " + vim.getName() + " with ID " + vim.getId()); } } } catch (InterruptedException ie) { log.error("Could not clear the NFVO due to an InterruptedException"); } catch (Exception e) { log.error("Could not clear the NFVO. 
\nException message is: " + e.getMessage()); } } public static void main(String[] args) throws Exception { Properties properties = null; try { properties = loadProperties(); } catch (IOException e) { e.printStackTrace(); log.error(e.getMessage()); System.exit(42); } log.debug("Current properties set: " + properties); // Checking if the NFVO is running if (!Utils.isNfvoStarted(nfvoIp, nfvoPort)) { log.error("After 120 sec the NFVO is not started yet. Is there an error?"); System.exit(1); } NFVORequestor requestor = NfvoRequestorBuilder.create() .projectName(projectName) .nfvoIp(nfvoIp) .nfvoPort(nfvoPort) .password(nfvoPwd) .username(nfvoUsr) .sslEnabled(sslEnabled) .build(); if (args.length > 0 && args[0].equals("clean")) { log.info("Executing clean up of existing descriptors/records "); clearOrchestrator(requestor); System.exit(0); } // Checking command line arguments List<String> clArgs = Arrays.asList(args); List<URL> iniFileURLs = Utils.getURLFileList(properties.getProperty("integration-test-scenarios")); // Check if arguments passed are correct if (clArgs.size() > 0) { List<String> fileNames = Utils.getFileNames(iniFileURLs); for (String arg : clArgs) { if (!fileNames.contains(arg)) { log.warn( "The scenario name passed as argument " + arg + " does not refer to any existing scenarios in integration-test-scenarios folder"); } } } log.info("NFVO is reachable at " + nfvoIp + ":" + nfvoPort + ". 
Loading tests"); IntegrationTestManager itm = new IntegrationTestManager( "org.openbaton.integration.test.testers", requestor, projectName); long startTime, stopTime; boolean allTestsPassed = true; Map<String, String> results = new HashMap<>(); boolean executedTests = false; // shows that there was at least one test executed by the integration test for (URL url : iniFileURLs) { String[] splittedUrl = url.toString().split("/"); String name = splittedUrl[splittedUrl.length - 1]; if (clArgs.size() > 0 && !clArgs.contains( name)) // if test names are passed through the command line, only these will be executed { continue; } executedTests = true; startTime = System.currentTimeMillis(); if (!name.contains("stress")) { if (itm.runTestScenario(properties, url, name)) { stopTime = System.currentTimeMillis() - startTime; log.info( "Test: " + name + " finished correctly :) in " + String.format( "%d min, %d sec", TimeUnit.MILLISECONDS.toMinutes(stopTime), TimeUnit.MILLISECONDS.toSeconds(stopTime) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(stopTime))) + "\n"); results.put(name, "SUCCESS"); } else { log.error("Test: " + name + " completed with errors :(\n"); allTestsPassed = false; results.put(name, "FAILED"); } } if (clearAfterTest) { clearOrchestrator(requestor); } } if (!executedTests) { log.warn("No tests were executed."); System.exit(1); } log.info("Final results of the execution of the tests: \n"); String[] columns = {"Scenario Name", "Result"}; Utils.getResultsTable(columns, results).printTable(); System.out.println(); if (allTestsPassed) { System.exit(0); } else { System.exit(99); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.io.network.netty;

import org.apache.flink.runtime.io.network.ConnectionID;
import org.apache.flink.runtime.io.network.NetworkClientHandler;
import org.apache.flink.runtime.io.network.netty.exception.LocalTransportException;
import org.apache.flink.runtime.io.network.netty.exception.RemoteTransportException;
import org.apache.flink.runtime.io.network.partition.consumer.RemoteInputChannel;

import org.apache.flink.shaded.netty4.io.netty.channel.Channel;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelFuture;
import org.apache.flink.shaded.netty4.io.netty.channel.ChannelFutureListener;

import java.io.IOException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * Factory for {@link PartitionRequestClient} instances.
 *
 * <p>Instances of partition requests clients are shared among several {@link RemoteInputChannel}
 * instances.
 */
class PartitionRequestClientFactory {

	private final NettyClient nettyClient;

	// Maps a connection to either an established PartitionRequestClient or a
	// ConnectingChannel placeholder while the connection is still being set up.
	// The Object value type is what allows both states in one map.
	private final ConcurrentMap<ConnectionID, Object> clients = new ConcurrentHashMap<ConnectionID, Object>();

	PartitionRequestClientFactory(NettyClient nettyClient) {
		this.nettyClient = nettyClient;
	}

	/**
	 * Atomically establishes a TCP connection to the given remote address and
	 * creates a {@link PartitionRequestClient} instance for this connection.
	 */
	PartitionRequestClient createPartitionRequestClient(ConnectionID connectionId) throws IOException, InterruptedException {
		Object entry;
		PartitionRequestClient client = null;

		// Loop until a client was obtained AND its reference count was successfully
		// incremented; a failed increment means the client was concurrently destroyed
		// and we must retry.
		while (client == null) {
			entry = clients.get(connectionId);

			if (entry != null) {
				// Existing channel or connecting channel
				if (entry instanceof PartitionRequestClient) {
					client = (PartitionRequestClient) entry;
				}
				else {
					ConnectingChannel future = (ConnectingChannel) entry;
					// Blocks until the establishing thread hands in the channel (or an error).
					client = future.waitForChannel();

					clients.replace(connectionId, future, client);
				}
			}
			else {
				// No channel yet. Create one, but watch out for a race.
				// We create a "connecting future" and atomically add it to the map.
				// Only the thread that really added it establishes the channel.
				// The others need to wait on that original establisher's future.
				ConnectingChannel connectingChannel = new ConnectingChannel(connectionId, this);
				Object old = clients.putIfAbsent(connectionId, connectingChannel);

				if (old == null) {
					// We won the race: initiate the connect; the ConnectingChannel itself
					// is the listener that receives the resulting channel.
					nettyClient.connect(connectionId.getAddress()).addListener(connectingChannel);

					client = connectingChannel.waitForChannel();

					clients.replace(connectionId, connectingChannel, client);
				}
				else if (old instanceof ConnectingChannel) {
					// Another thread is already connecting; wait on its future.
					client = ((ConnectingChannel) old).waitForChannel();

					clients.replace(connectionId, old, client);
				}
				else {
					// Another thread already finished connecting.
					client = (PartitionRequestClient) old;
				}
			}

			// Make sure to increment the reference count before handing a client
			// out to ensure correct bookkeeping for channel closing.
			if (!client.incrementReferenceCounter()) {
				destroyPartitionRequestClient(connectionId, client);
				client = null;
			}
		}

		return client;
	}

	/**
	 * Disposes a pending connection attempt for the given connection, if one exists.
	 * Established clients are left untouched.
	 */
	public void closeOpenChannelConnections(ConnectionID connectionId) {
		Object entry = clients.get(connectionId);

		if (entry instanceof ConnectingChannel) {
			ConnectingChannel channel = (ConnectingChannel) entry;

			if (channel.dispose()) {
				clients.remove(connectionId, channel);
			}
		}
	}

	int getNumberOfActiveClients() {
		return clients.size();
	}

	/**
	 * Removes the client for the given {@link ConnectionID}.
	 */
	void destroyPartitionRequestClient(ConnectionID connectionId, PartitionRequestClient client) {
		clients.remove(connectionId, client);
	}

	/**
	 * Placeholder map entry used while a connection is being established. Also acts
	 * as the Netty listener for the connect future and as the rendezvous point that
	 * waiting threads block on until the client (or an error) is available.
	 */
	private static final class ConnectingChannel implements ChannelFutureListener {

		// Guards the hand-off of partitionRequestClient/error and disposeRequestClient.
		private final Object connectLock = new Object();

		private final ConnectionID connectionId;

		private final PartitionRequestClientFactory clientFactory;

		// Set when dispose() is called before the channel was handed in; makes
		// handInChannel() immediately dispose the freshly created client.
		private boolean disposeRequestClient = false;

		public ConnectingChannel(ConnectionID connectionId, PartitionRequestClientFactory clientFactory) {
			this.connectionId = connectionId;
			this.clientFactory = clientFactory;
		}

		// Returns true when the entry can be removed from the map (client unused or
		// not yet created).
		private boolean dispose() {
			boolean result;
			synchronized (connectLock) {
				if (partitionRequestClient != null) {
					result = partitionRequestClient.disposeIfNotUsed();
				}
				else {
					disposeRequestClient = true;
					result = true;
				}

				connectLock.notifyAll();
			}

			return result;
		}

		// Called from the Netty thread on successful connect: builds the client and
		// wakes up every thread blocked in waitForChannel().
		private void handInChannel(Channel channel) {
			synchronized (connectLock) {
				try {
					NetworkClientHandler clientHandler = channel.pipeline().get(NetworkClientHandler.class);
					partitionRequestClient = new PartitionRequestClient(
						channel, clientHandler, connectionId, clientFactory);

					if (disposeRequestClient) {
						partitionRequestClient.disposeIfNotUsed();
					}

					connectLock.notifyAll();
				}
				catch (Throwable t) {
					notifyOfError(t);
				}
			}
		}

		// volatile: read outside the lock in waitForChannel() after the wait loop.
		private volatile PartitionRequestClient partitionRequestClient;

		private volatile Throwable error;

		private PartitionRequestClient waitForChannel() throws IOException, InterruptedException {
			synchronized (connectLock) {
				// Loop guards against spurious wakeups; the timeout re-checks state
				// periodically rather than bounding the overall wait.
				while (error == null && partitionRequestClient == null) {
					connectLock.wait(2000);
				}
			}

			if (error != null) {
				throw new IOException("Connecting the channel failed: " + error.getMessage(), error);
			}

			return partitionRequestClient;
		}

		private void notifyOfError(Throwable error) {
			synchronized (connectLock) {
				this.error = error;
				connectLock.notifyAll();
			}
		}

		@Override
		public void operationComplete(ChannelFuture future) throws Exception {
			if (future.isSuccess()) {
				handInChannel(future.channel());
			}
			else if (future.cause() != null) {
				// NOTE(review): the message literal contains a stray "+ '" before the
				// address — present in the original; left unchanged here.
				notifyOfError(new RemoteTransportException(
					"Connecting to remote task manager + '" + connectionId.getAddress() +
						"' has failed. This might indicate that the remote task " +
						"manager has been lost.",
					connectionId.getAddress(), future.cause()));
			}
			else {
				notifyOfError(new LocalTransportException(
					String.format(
						"Connecting to remote task manager '%s' has been cancelled.",
						connectionId.getAddress()),
					null));
			}
		}
	}
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.lang.properties;

import com.intellij.lang.properties.psi.PropertiesFile;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.NotNullLazyValue;
import com.intellij.openapi.util.NullableComputable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.util.Function;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

/**
 * Project-level service tracking resource-bundle customizations: files the user has
 * dissociated from their default bundle, and custom bundles the user has combined from
 * arbitrary properties files. State is persisted (see {@link ResourceBundleManagerState})
 * as file URLs, so the PSI listener installed in the constructor keeps those URLs in
 * sync when files/directories are moved or removed.
 *
 * @author Dmitry Batkovich
 */
@State(
  name = "ResourceBundleManager",
  storages = {
    @Storage(file = StoragePathMacros.PROJECT_FILE),
    @Storage(file = StoragePathMacros.PROJECT_CONFIG_DIR + "/resourceBundles.xml", scheme = StorageScheme.DIRECTORY_BASED)
  })
public class ResourceBundleManager implements PersistentStateComponent<ResourceBundleManagerState> {
  private final static Logger LOG = Logger.getInstance(ResourceBundleManager.class);

  // Persisted state: dissociated-file URLs and custom-bundle definitions.
  private ResourceBundleManagerState myState = new ResourceBundleManagerState();

  public ResourceBundleManager(final PsiManager manager) {
    // Keep the stored URLs valid across PSI moves and deletions.
    manager.addPsiTreeChangeListener(new PsiTreeChangeAdapter() {
      @Override
      public void childMoved(@NotNull final PsiTreeChangeEvent event) {
        final PsiElement child = event.getChild();
        if (!(child instanceof PsiFile)) {
          if (child instanceof PsiDirectory) {
            if (event.getOldParent() instanceof PsiDirectory && event.getNewParent() instanceof PsiDirectory) {
              // A whole directory moved: rewrite every stored URL under the old prefix.
              final String fromDirUrl = ((PsiDirectory)event.getOldParent()).getVirtualFile().getUrl() + "/";
              // Lazy: the new-parent URL is only computed if some stored URL actually matches.
              final NotNullLazyValue<String> toDirUrl = new NotNullLazyValue<String>() {
                @NotNull
                @Override
                protected String compute() {
                  return ((PsiDirectory)event.getNewParent()).getVirtualFile().getUrl() + "/";
                }
              };
              // Iterate over a copy since the underlying collections are mutated in the loop.
              for (String dissociatedFileUrl : new SmartList<String>(myState.getDissociatedFiles())) {
                if (dissociatedFileUrl.startsWith(fromDirUrl)) {
                  myState.getDissociatedFiles().remove(dissociatedFileUrl);
                  myState.getDissociatedFiles().add(toDirUrl.getValue() + dissociatedFileUrl.substring(fromDirUrl.length()));
                }
              }
              for (CustomResourceBundleState customResourceBundleState : myState.getCustomResourceBundles()) {
                for (String fileUrl : new SmartList<String>(customResourceBundleState.getFileUrls())) {
                  if (fileUrl.startsWith(fromDirUrl)) {
                    customResourceBundleState.getFileUrls().remove(fileUrl);
                    customResourceBundleState.getFileUrls().add(toDirUrl.getValue() + fileUrl.substring(fromDirUrl.length()));
                  }
                }
              }
            }
          }
          return;
        }
        // Single file moved: only properties files are tracked.
        final PropertiesFile propertiesFile = PropertiesImplUtil.getPropertiesFile((PsiFile)child);
        if (propertiesFile == null) {
          return;
        }
        final String oldParentUrl = getUrl(event.getOldParent());
        if (oldParentUrl == null) {
          return;
        }
        final String newParentUrl = getUrl(event.getNewParent());
        if (newParentUrl == null) {
          return;
        }
        // Pair of (old URL reconstructed from the old parent, current URL), computed lazily.
        final NotNullLazyValue<Pair<String, String>> oldAndNewUrls = new NotNullLazyValue<Pair<String, String>>() {
          @NotNull
          @Override
          protected Pair<String, String> compute() {
            final String newUrl = propertiesFile.getVirtualFile().getUrl();
            return Pair.create(oldParentUrl + newUrl.substring(newParentUrl.length()), newUrl);
          }
        };
        if (!myState.getDissociatedFiles().isEmpty()) {
          if (myState.getDissociatedFiles().remove(oldAndNewUrls.getValue().getFirst())) {
            myState.getDissociatedFiles().add(oldAndNewUrls.getValue().getSecond());
          }
        }
        for (CustomResourceBundleState customResourceBundleState : myState.getCustomResourceBundles()) {
          if (customResourceBundleState.getFileUrls().remove(oldAndNewUrls.getValue().getFirst())) {
            customResourceBundleState.getFileUrls().add(oldAndNewUrls.getValue().getSecond());
            // A file belongs to at most one custom bundle.
            break;
          }
        }
      }

      // URL of a directory element, or null for anything else.
      @Nullable
      private String getUrl(PsiElement element) {
        return !(element instanceof PsiDirectory) ? null : ((PsiDirectory)element).getVirtualFile().getUrl();
      }

      @Override
      public void beforeChildRemoval(@NotNull PsiTreeChangeEvent event) {
        final PsiElement child = event.getChild();
        if (!(child instanceof PsiFile)) {
          if (child instanceof PsiDirectory) {
            // Directory removed: drop every stored URL under it.
            final String deletedDirUrl = ((PsiDirectory)child).getVirtualFile().getUrl() + "/";
            for (String dissociatedFileUrl : new SmartList<String>(myState.getDissociatedFiles())) {
              if (dissociatedFileUrl.startsWith(deletedDirUrl)) {
                myState.getDissociatedFiles().remove(dissociatedFileUrl);
              }
            }
            for (CustomResourceBundleState customResourceBundleState : new SmartList<CustomResourceBundleState>(myState.getCustomResourceBundles())) {
              for (String fileUrl : new ArrayList<String>(customResourceBundleState.getFileUrls())) {
                if (fileUrl.startsWith(deletedDirUrl)) {
                  customResourceBundleState.getFileUrls().remove(fileUrl);
                }
              }
              // A custom bundle needs at least two files to be meaningful.
              if (customResourceBundleState.getFileUrls().size() < 2) {
                myState.getCustomResourceBundles().remove(customResourceBundleState);
              }
            }
          }
          return;
        };
        PsiFile psiFile = (PsiFile)child;
        if (!PropertiesImplUtil.canBePropertyFile(psiFile)) return;
        final VirtualFile virtualFile = psiFile.getVirtualFile();
        final NotNullLazyValue<String> url = new NotNullLazyValue<String>() {
          @NotNull
          @Override
          protected String compute() {
            return virtualFile.getUrl();
          }
        };
        if (!myState.getDissociatedFiles().isEmpty()) {
          myState.getDissociatedFiles().remove(url.getValue());
        }
        for (CustomResourceBundleState customResourceBundleState : new SmartList<CustomResourceBundleState>(myState.getCustomResourceBundles())) {
          final Set<String> urls = customResourceBundleState.getFileUrls();
          if (urls.remove(url.getValue())) {
            if (urls.size() < 2) {
              myState.getCustomResourceBundles().remove(customResourceBundleState);
            }
            break;
          }
        }
      }
    });
  }

  public static ResourceBundleManager getInstance(final Project project) {
    return ServiceManager.getService(project, ResourceBundleManager.class);
  }

  /**
   * Fully qualified bundle name of the given properties file (package-qualified base name),
   * or null when the containing package cannot be determined. Runs inside a read action.
   */
  @Nullable
  public String getFullName(final @NotNull PropertiesFile propertiesFile) {
    return ApplicationManager.getApplication().runReadAction(new NullableComputable<String>() {
      public String compute() {
        final PsiDirectory directory = propertiesFile.getParent();
        final String packageQualifiedName = PropertiesUtil.getPackageQualifiedName(directory);
        if (packageQualifiedName == null) {
          return null;
        }
        final StringBuilder qName = new StringBuilder(packageQualifiedName);
        if (qName.length() > 0) {
          qName.append(".");
        }
        qName.append(getBaseName(propertiesFile.getContainingFile()));
        return qName.toString();
      }
    });
  }

  @NotNull
  public String getBaseName(@NotNull final PsiFile file) {
    return getBaseName(file.getVirtualFile());
  }

  /**
   * Base name resolution order: custom-bundle base name, then the plain file name for
   * dissociated files, then the default locale-stripping logic.
   */
  @NotNull
  private String getBaseName(@NotNull final VirtualFile file) {
    final CustomResourceBundleState customResourceBundle = getCustomResourceBundleState(file);
    if (customResourceBundle != null) {
      return customResourceBundle.getBaseName();
    }
    if (isDefaultDissociated(file)) {
      return file.getNameWithoutExtension();
    }
    return PropertiesUtil.getDefaultBaseName(file);
  }

  /**
   * Breaks the given bundle apart: custom bundles are simply forgotten, while default
   * bundles are invalidated and each member file is recorded as dissociated.
   */
  public void dissociateResourceBundle(final @NotNull ResourceBundle resourceBundle) {
    if (resourceBundle instanceof CustomResourceBundle) {
      final CustomResourceBundleState state =
        getCustomResourceBundleState(resourceBundle.getDefaultPropertiesFile().getVirtualFile());
      LOG.assertTrue(state != null);
      myState.getCustomResourceBundles().remove(state);
    }
    else {
      if (EmptyResourceBundle.getInstance() != resourceBundle) {
        ((ResourceBundleImpl) resourceBundle).invalidate();
      }
      for (final PropertiesFile propertiesFile : resourceBundle.getPropertiesFiles()) {
        final VirtualFile file = propertiesFile.getContainingFile().getVirtualFile();
        myState.getDissociatedFiles().add(file.getUrl());
      }
    }
  }

  /** Records a new custom bundle made of the given files under the given base name. */
  public void combineToResourceBundle(final @NotNull List<PropertiesFile> propertiesFiles, final String baseName) {
    myState.getCustomResourceBundles()
      .add(new CustomResourceBundleState().addAll(ContainerUtil.map(propertiesFiles, new Function<PropertiesFile, String>() {
        @Override
        public String fun(PropertiesFile file) {
          return file.getVirtualFile().getUrl();
        }
      })).setBaseName(baseName));
  }

  /** Custom bundle containing the given file, or null if the file is not part of one. */
  @Nullable
  public CustomResourceBundle getCustomResourceBundle(final @NotNull PropertiesFile file) {
    final VirtualFile virtualFile = file.getVirtualFile();
    if (virtualFile == null) {
      return null;
    }
    final CustomResourceBundleState state = getCustomResourceBundleState(virtualFile);
    return state == null ? null : CustomResourceBundle.fromState(state, file.getProject());
  }

  /** True when the file was dissociated from its default bundle or belongs to a custom one. */
  public boolean isDefaultDissociated(final @NotNull VirtualFile virtualFile) {
    if (myState.getDissociatedFiles().isEmpty() && myState.getCustomResourceBundles().isEmpty()) {
      return false;
    }
    final String url = virtualFile.getUrl();
    return myState.getDissociatedFiles().contains(url) || getCustomResourceBundleState(virtualFile) != null;
  }

  // Linear scan over custom bundles; fine for the expected small number of entries.
  @Nullable
  private CustomResourceBundleState getCustomResourceBundleState(final @NotNull VirtualFile virtualFile) {
    if (myState.getCustomResourceBundles().isEmpty()) {
      return null;
    }
    final String url = virtualFile.getUrl();
    for (CustomResourceBundleState customResourceBundleState : myState.getCustomResourceBundles()) {
      if (customResourceBundleState.getFileUrls().contains(url)) {
        return customResourceBundleState;
      }
    }
    return null;
  }

  @Nullable
  @Override
  public ResourceBundleManagerState getState() {
    // Returning null for an empty state avoids persisting an empty component tag.
    return myState.isEmpty() ? null : myState;
  }

  @Override
  public void loadState(ResourceBundleManagerState state) {
    // Stale URLs (files deleted while the project was closed) are pruned on load.
    myState = state.removeNonExistentFiles();
  }
}
/* * Copyright 2015-2016 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.alerts.engine.impl; import java.util.HashMap; import java.util.Map; import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.Session; /** * PreparedStatements need to be prepared only one time for the Datastax driver. Avoid overhead and warnings by * caching the PreparedStatements in one place. 
 *
 * @author Jay Shaughnessy
 * @author Lucas Ponce
 */
public class CassStatement {

    // Name of the config property that selects the Cassandra keyspace; resolved once below.
    private static final String CASSANDRA_KEYSPACE = "hawkular-alerts.cassandra-keyspace";

    // Keyspace actually used in every CQL string; read at class-load time, defaults to "hawkular_alerts".
    private static final String keyspace;

    // Cache of prepared statements keyed by their CQL text; all access goes through the synchronized get().
    private static final Map<String, PreparedStatement> statementMap = new HashMap<>();

    // ------------------------------------------------------------------
    // DELETE statements
    // ------------------------------------------------------------------
    public static final String DELETE_ACTION_DEFINITION;
    public static final String DELETE_ACTION_HISTORY;
    public static final String DELETE_ACTION_HISTORY_ACTION;
    public static final String DELETE_ACTION_HISTORY_ALERT;
    public static final String DELETE_ACTION_HISTORY_CTIME;
    public static final String DELETE_ACTION_HISTORY_RESULT;
    public static final String DELETE_ACTION_PLUGIN;
    public static final String DELETE_ALERT;
    public static final String DELETE_ALERT_CTIME;
    public static final String DELETE_ALERT_SEVERITY;
    public static final String DELETE_ALERT_STATUS;
    public static final String DELETE_ALERT_TRIGGER;
    public static final String DELETE_CONDITIONS;
    public static final String DELETE_CONDITIONS_MODE;
    public static final String DELETE_DAMPENING_ID;
    public static final String DELETE_DAMPENINGS;
    public static final String DELETE_EVENT;
    public static final String DELETE_EVENT_CATEGORY;
    public static final String DELETE_EVENT_CTIME;
    public static final String DELETE_EVENT_TRIGGER;
    public static final String DELETE_TAG;
    public static final String DELETE_TRIGGER_ACTIONS;
    public static final String DELETE_TRIGGER;

    // ------------------------------------------------------------------
    // INSERT statements
    // ------------------------------------------------------------------
    public static final String INSERT_ACTION_DEFINITION;
    public static final String INSERT_ACTION_HISTORY;
    public static final String INSERT_ACTION_HISTORY_ACTION;
    public static final String INSERT_ACTION_HISTORY_ALERT;
    public static final String INSERT_ACTION_HISTORY_CTIME;
    public static final String INSERT_ACTION_HISTORY_RESULT;
    public static final String INSERT_ACTION_PLUGIN;
    public static final String INSERT_ACTION_PLUGIN_DEFAULT_PROPERTIES;
    public static final String INSERT_ALERT;
    public static final String INSERT_ALERT_CTIME;
    public static final String INSERT_ALERT_SEVERITY;
    public static final String INSERT_ALERT_STATUS;
    public static final String INSERT_ALERT_TRIGGER;
    public static final String INSERT_CONDITION_AVAILABILITY;
    public static final String INSERT_CONDITION_COMPARE;
    public static final String INSERT_CONDITION_EVENT;
    public static final String INSERT_CONDITION_EXTERNAL;
    public static final String INSERT_CONDITION_RATE;
    public static final String INSERT_CONDITION_STRING;
    public static final String INSERT_CONDITION_THRESHOLD;
    public static final String INSERT_CONDITION_THRESHOLD_RANGE;
    public static final String INSERT_DAMPENING;
    public static final String INSERT_EVENT;
    public static final String INSERT_EVENT_CATEGORY;
    public static final String INSERT_EVENT_CTIME;
    public static final String INSERT_EVENT_TRIGGER;
    public static final String INSERT_TAG;
    public static final String INSERT_TRIGGER;
    public static final String INSERT_TRIGGER_ACTIONS;

    // ------------------------------------------------------------------
    // SELECT statements
    // ------------------------------------------------------------------
    public static final String SELECT_ACTION_DEFINITION;
    public static final String SELECT_ACTION_HISTORY;
    public static final String SELECT_ACTION_HISTORY_ACTION_ID;
    public static final String SELECT_ACTION_HISTORY_ACTION_PLUGIN;
    public static final String SELECT_ACTION_HISTORY_ALERT_ID;
    public static final String SELECT_ACTION_HISTORY_BY_TENANT;
    public static final String SELECT_ACTION_HISTORY_CTIME_END;
    public static final String SELECT_ACTION_HISTORY_CTIME_START;
    public static final String SELECT_ACTION_HISTORY_CTIME_START_END;
    public static final String SELECT_ACTION_HISTORY_RESULT;
    public static final String SELECT_ACTION_ID_ALL;
    public static final String SELECT_ACTION_ID_BY_TENANT;
    public static final String SELECT_ACTION_ID_BY_PLUGIN;
    public static final String SELECT_ACTION_PLUGIN;
    public static final String SELECT_ACTION_PLUGIN_DEFAULT_PROPERTIES;
    public static final String SELECT_ACTION_PLUGINS;
    public static final String SELECT_ALERT;
    public static final String SELECT_ALERT_CTIME_END;
    public static final String SELECT_ALERT_CTIME_START;
    public static final String SELECT_ALERT_CTIME_START_END;
    public static final String SELECT_ALERT_STATUS;
    public static final String SELECT_ALERT_SEVERITY;
    public static final String SELECT_ALERT_TRIGGER;
    public static final String SELECT_ALERTS_BY_TENANT;
    public static final String SELECT_CONDITION_ID;
    public static final String SELECT_CONDITIONS_ALL;
    public static final String SELECT_CONDITIONS_BY_TENANT;
    public static final String SELECT_DAMPENING_ID;
    public static final String SELECT_DAMPENINGS_ALL;
    public static final String SELECT_DAMPENINGS_BY_TENANT;
    public static final String SELECT_EVENT;
    public static final String SELECT_EVENT_CATEGORY;
    public static final String SELECT_EVENT_CTIME_END;
    public static final String SELECT_EVENT_CTIME_START;
    public static final String SELECT_EVENT_CTIME_START_END;
    public static final String SELECT_EVENT_TRIGGER;
    public static final String SELECT_EVENTS_BY_TENANT;
    //public static final String SELECT_EVENTS_BY_PARTITION;
    // public static final String SELECT_PARTITIONS_EVENTS;
    public static final String SELECT_PARTITIONS_TRIGGERS;
    public static final String SELECT_TAGS_BY_NAME;
    public static final String SELECT_TAGS_BY_NAME_AND_VALUE;
    public static final String SELECT_TRIGGER;
    public static final String SELECT_TRIGGER_ACTIONS;
    public static final String SELECT_TRIGGER_CONDITIONS;
    public static final String SELECT_TRIGGER_CONDITIONS_TRIGGER_MODE;
    public static final String SELECT_TRIGGER_DAMPENINGS;
    public static final String SELECT_TRIGGER_DAMPENINGS_MODE;
    public static final String SELECT_TRIGGERS_ALL;
    public static final String SELECT_TRIGGERS_TENANT;

    // ------------------------------------------------------------------
    // UPDATE statements
    // ------------------------------------------------------------------
    public static final String UPDATE_ACTION_DEFINITION;
    public static final String UPDATE_ACTION_HISTORY;
    public static final String UPDATE_ACTION_PLUGIN;
    public static final String UPDATE_ACTION_PLUGIN_DEFAULT_PROPERTIES;
    public static final String UPDATE_ALERT;
    public static final String UPDATE_DAMPENING_ID;
    public static final String UPDATE_EVENT;
    public static final String UPDATE_TRIGGER;
    public static final String UPDATE_TRIGGER_ENABLED;

    // All constants are built once here so the keyspace can be injected into the CQL text.
    static {
        keyspace = AlertProperties.getProperty(CASSANDRA_KEYSPACE, "hawkular_alerts");

        DELETE_ACTION_DEFINITION = "DELETE FROM " + keyspace + ".actions_definitions "
                + "WHERE tenantId = ? AND actionPlugin = ? AND actionId = ? ";

        DELETE_ACTION_HISTORY = "DELETE FROM " + keyspace + ".actions_history "
                + "WHERE tenantId = ? AND actionPlugin = ? AND actionId = ? AND alertId = ? AND ctime = ?";

        DELETE_ACTION_HISTORY_ACTION = "DELETE FROM " + keyspace + ".actions_history_actions "
                + "WHERE tenantId = ? AND actionId = ? AND actionPlugin = ? AND alertId = ? AND ctime = ?";

        DELETE_ACTION_HISTORY_ALERT = "DELETE FROM " + keyspace + ".actions_history_alerts "
                + "WHERE tenantId = ? AND alertId = ? AND actionPlugin = ? AND actionId = ? AND ctime = ?";

        DELETE_ACTION_HISTORY_CTIME = "DELETE FROM " + keyspace + ".actions_history_ctimes "
                + "WHERE tenantId = ? AND ctime = ? AND actionPlugin = ? AND actionId = ? AND alertId = ?";

        DELETE_ACTION_HISTORY_RESULT = "DELETE FROM " + keyspace + ".actions_history_results "
                + "WHERE tenantId = ? AND result = ? AND actionPlugin = ? AND actionId = ? AND alertId = ? AND ctime = ?";

        DELETE_ACTION_PLUGIN = "DELETE FROM " + keyspace + ".action_plugins WHERE actionPlugin = ? ";

        DELETE_ALERT = "DELETE FROM " + keyspace + ".alerts "
                + "WHERE tenantId = ? AND alertId = ? ";

        DELETE_ALERT_CTIME = "DELETE FROM " + keyspace + ".alerts_ctimes "
                + "WHERE tenantId = ? AND ctime = ? AND alertId = ? ";

        DELETE_ALERT_SEVERITY = "DELETE FROM " + keyspace + ".alerts_severities "
                + "WHERE tenantId = ? AND severity = ? AND alertId = ? ";

        DELETE_ALERT_STATUS = "DELETE FROM " + keyspace + ".alerts_statuses "
                + "WHERE tenantId = ? AND status = ? AND alertId = ? ";

        DELETE_ALERT_TRIGGER = "DELETE FROM " + keyspace + ".alerts_triggers "
                + "WHERE tenantId = ? AND triggerId = ? AND alertId = ? ";

        DELETE_CONDITIONS = "DELETE FROM " + keyspace + ".conditions "
                + "WHERE tenantId = ? AND triggerId = ? ";

        DELETE_CONDITIONS_MODE = "DELETE FROM " + keyspace + ".conditions "
                + "WHERE tenantId = ? AND triggerId = ? AND triggerMode = ? ";

        DELETE_DAMPENING_ID = "DELETE FROM " + keyspace + ".dampenings "
                + "WHERE tenantId = ? AND triggerId = ? AND triggerMode = ? AND dampeningId = ? ";

        DELETE_DAMPENINGS = "DELETE FROM " + keyspace + ".dampenings "
                + "WHERE tenantId = ? AND triggerId = ? ";

        DELETE_EVENT = "DELETE FROM " + keyspace + ".events "
                + "WHERE tenantId = ? AND id = ? ";

        DELETE_EVENT_CTIME = "DELETE FROM " + keyspace + ".events_ctimes "
                + "WHERE tenantId = ? AND ctime = ? AND id = ? ";

        DELETE_EVENT_CATEGORY = "DELETE FROM " + keyspace + ".events_categories "
                + "WHERE tenantId = ? AND category = ? AND id = ? ";

        DELETE_EVENT_TRIGGER = "DELETE FROM " + keyspace + ".events_triggers "
                + "WHERE tenantId = ? AND triggerId = ? AND id = ? ";

        DELETE_TAG = "DELETE FROM " + keyspace + ".tags "
                + "WHERE tenantId = ? AND type = ? AND name = ? and value = ? AND id = ?";

        DELETE_TRIGGER_ACTIONS = "DELETE FROM " + keyspace + ".triggers_actions "
                + "WHERE tenantId = ? AND triggerId = ? ";

        DELETE_TRIGGER = "DELETE FROM " + keyspace + ".triggers "
                + "WHERE tenantId = ? AND id = ? ";

        INSERT_ACTION_DEFINITION = "INSERT INTO " + keyspace + ".actions_definitions "
                + "(tenantId, actionPlugin, actionId, payload) VALUES (?, ?, ?, ?) ";

        // The actions_history inserts use IF NOT EXISTS (lightweight transactions), unlike the
        // other insert statements in this class.
        INSERT_ACTION_HISTORY = "INSERT INTO " + keyspace + ".actions_history "
                + "(tenantId, actionPlugin, actionId, alertId, ctime, payload) VALUES (?, ?, ?, ?, ?, ?) "
                + "IF NOT EXISTS";

        INSERT_ACTION_HISTORY_ACTION = "INSERT INTO " + keyspace + ".actions_history_actions "
                + "(tenantId, actionId, actionPlugin, alertId, ctime) VALUES (?, ?, ?, ?, ?) "
                + "IF NOT EXISTS";

        INSERT_ACTION_HISTORY_ALERT = "INSERT INTO " + keyspace + ".actions_history_alerts "
                + "(tenantId, alertId, actionPlugin, actionId, ctime) VALUES (?, ?, ?, ?, ?) "
                + "IF NOT EXISTS";

        INSERT_ACTION_HISTORY_CTIME = "INSERT INTO " + keyspace + ".actions_history_ctimes "
                + "(tenantId, ctime, actionPlugin, actionId, alertId) VALUES (?, ?, ?, ?, ?) "
                + "IF NOT EXISTS";

        INSERT_ACTION_HISTORY_RESULT = "INSERT INTO " + keyspace + ".actions_history_results "
                + "(tenantId, result, actionPlugin, actionId, alertId, ctime) VALUES (?, ?, ?, ?, ?, ?) "
                + "IF NOT EXISTS";

        INSERT_ACTION_PLUGIN = "INSERT INTO " + keyspace + ".action_plugins "
                + "(actionPlugin, properties) VALUES (?, ?) ";

        INSERT_ACTION_PLUGIN_DEFAULT_PROPERTIES = "INSERT INTO " + keyspace + ".action_plugins "
                + "(actionPlugin, properties, defaultProperties) VALUES (?, ?, ?) ";

        INSERT_ALERT = "INSERT INTO " + keyspace + ".alerts "
                + "(tenantId, alertId, payload) VALUES (?, ?, ?) ";

        INSERT_ALERT_CTIME = "INSERT INTO " + keyspace + ".alerts_ctimes "
                + "(tenantId, alertId, ctime) VALUES (?, ?, ?) ";

        INSERT_ALERT_SEVERITY = "INSERT INTO " + keyspace + ".alerts_severities "
                + "(tenantId, alertId, severity) VALUES (?, ?, ?) ";

        INSERT_ALERT_STATUS = "INSERT INTO " + keyspace + ".alerts_statuses "
                + "(tenantId, alertId, status) VALUES (?, ?, ?) ";

        INSERT_ALERT_TRIGGER = "INSERT INTO " + keyspace + ".alerts_triggers "
                + "(tenantId, alertId, triggerId) VALUES (?, ?, ?) ";

        // One insert variant per condition type; the 'type' column is a hard-coded literal in each.
        INSERT_CONDITION_AVAILABILITY = "INSERT INTO " + keyspace + ".conditions "
                + "(tenantId, triggerId, triggerMode, type, context, conditionSetSize, conditionSetIndex, "
                + "conditionId, dataId, operator) VALUES (?, ?, ?, 'AVAILABILITY', ?, ?, ?, ?, ?, ?) ";

        INSERT_CONDITION_COMPARE = "INSERT INTO " + keyspace + ".conditions "
                + "(tenantId, triggerId, triggerMode, type, context, conditionSetSize, conditionSetIndex, "
                + "conditionId, dataId, operator, data2Id, data2Multiplier) "
                + "VALUES (?, ?, ?, 'COMPARE', ?, ?, ?, ?, ?, ?, ?, ?) ";

        INSERT_CONDITION_EVENT = "INSERT INTO " + keyspace + ".conditions "
                + "(tenantId, triggerId, triggerMode, type, context, conditionSetSize, conditionSetIndex, "
                + "conditionId, dataId, pattern) VALUES (?, ?, ?, 'EVENT', ?, ?, ?, ?, ?, ?) ";

        INSERT_CONDITION_EXTERNAL = "INSERT INTO " + keyspace + ".conditions "
                + "(tenantId, triggerId, triggerMode, type, context, conditionSetSize, conditionSetIndex, "
                + "conditionId, dataId, operator, pattern) VALUES (?, ?, ?, 'EXTERNAL', ?, ?, ?, ?, ?, ?, ?) ";

        INSERT_CONDITION_RATE = "INSERT INTO " + keyspace + ".conditions "
                + "(tenantId, triggerId, triggerMode, type, context, conditionSetSize, conditionSetIndex, "
                + "conditionId, dataId, direction, period, operator, threshold) "
                + "VALUES (?, ?, ?, 'RATE', ?, ?, ?, ?, ?, ?, ?, ?, ?) ";

        INSERT_CONDITION_STRING = "INSERT INTO " + keyspace + ".conditions "
                + "(tenantId, triggerId, triggerMode, type, context, conditionSetSize, conditionSetIndex, "
                + "conditionId, dataId, operator, pattern, ignoreCase) "
                + "VALUES (?, ?, ?, 'STRING', ?, ?, ?, ?, ?, ?, ?, ?) ";

        INSERT_CONDITION_THRESHOLD = "INSERT INTO " + keyspace + ".conditions "
                + "(tenantId, triggerId, triggerMode, type, context, conditionSetSize, conditionSetIndex, "
                + "conditionId, dataId, operator, threshold) VALUES (?, ?, ?, 'THRESHOLD', ?, ?, ?, ?, ?, ?, ?) ";

        INSERT_CONDITION_THRESHOLD_RANGE = "INSERT INTO " + keyspace + ".conditions "
                + "(tenantId, triggerId, triggerMode, type, context, conditionSetSize, conditionSetIndex, "
                + "conditionId, dataId, operatorLow, operatorHigh, thresholdLow, thresholdHigh, inRange) "
                + "VALUES (?, ?, ?, 'RANGE', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ";

        INSERT_DAMPENING = "INSERT INTO " + keyspace + ".dampenings "
                + "(tenantId, triggerId, triggerMode, type, evalTrueSetting, evalTotalSetting, evalTimeSetting, "
                + "dampeningId) VALUES (?, ?, ?, ?, ?, ?, ?, ?) ";

        INSERT_EVENT = "INSERT INTO " + keyspace + ".events "
                + "(tenantId, id, payload) VALUES (?, ?, ?) ";

        INSERT_EVENT_CTIME = "INSERT INTO " + keyspace + ".events_ctimes "
                + "(tenantId, ctime, id) VALUES (?, ?, ?) ";

        INSERT_EVENT_CATEGORY = "INSERT INTO " + keyspace + ".events_categories "
                + "(tenantId, category, id) VALUES (?, ?, ?) ";

        INSERT_EVENT_TRIGGER = "INSERT INTO " + keyspace + ".events_triggers "
                + "(tenantId, triggerId, id) VALUES (?, ?, ?) ";

        INSERT_TAG = "INSERT INTO " + keyspace + ".tags "
                + "(tenantId, type, name, value, id) VALUES (?, ?, ?, ?, ?) ";

        INSERT_TRIGGER = "INSERT INTO " + keyspace + ".triggers "
                + "(tenantId, id, autoDisable, autoEnable, autoResolve, autoResolveAlerts, autoResolveMatch, "
                + "context, description, enabled, eventCategory, eventText, eventType, firingMatch, memberOf, name, "
                + "severity, source, tags, type) "
                + "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ";

        INSERT_TRIGGER_ACTIONS = "INSERT INTO " + keyspace + ".triggers_actions "
                + "(tenantId, triggerId, actionPlugin, actionId, payload) VALUES (?, ?, ?, ?, ?) ";

        SELECT_ACTION_DEFINITION = "SELECT payload FROM " + keyspace + ".actions_definitions "
                + "WHERE tenantId = ? AND actionPlugin = ? AND actionId = ? ";

        SELECT_ACTION_HISTORY = "SELECT payload FROM " + keyspace + ".actions_history "
                + "WHERE tenantId = ? AND actionPlugin = ? AND actionId = ? AND alertId = ? and ctime = ?";

        SELECT_ACTION_HISTORY_ACTION_ID = "SELECT tenantId, actionPlugin, actionId, alertId, ctime FROM "
                + keyspace + ".actions_history_actions WHERE tenantId = ? AND actionId = ?";

        // NOTE(review): unlike the other by-criteria history selects, this one reads the main
        // actions_history table rather than a denormalized *_plugins table — confirm intentional.
        SELECT_ACTION_HISTORY_ACTION_PLUGIN = "SELECT tenantId, actionPlugin, actionId, alertId, ctime FROM "
                + keyspace + ".actions_history WHERE tenantId = ? AND actionPlugin = ?";

        SELECT_ACTION_HISTORY_ALERT_ID = "SELECT tenantId, actionPlugin, actionId, alertId, ctime FROM "
                + keyspace + ".actions_history_alerts WHERE tenantId = ? AND alertId = ?";

        SELECT_ACTION_HISTORY_BY_TENANT = "SELECT payload FROM " + keyspace + ".actions_history "
                + "WHERE tenantId = ?";

        SELECT_ACTION_HISTORY_CTIME_END = "SELECT tenantId, actionPlugin, actionId, alertId, ctime FROM "
                + keyspace + ".actions_history_ctimes WHERE tenantId = ? AND ctime <= ?";

        SELECT_ACTION_HISTORY_CTIME_START = "SELECT tenantId, actionPlugin, actionId, alertId, ctime FROM "
                + keyspace + ".actions_history_ctimes WHERE tenantId = ? AND ctime >= ?";

        SELECT_ACTION_HISTORY_CTIME_START_END = "SELECT tenantId, actionPlugin, actionId, alertId, ctime FROM "
                + keyspace + ".actions_history_ctimes WHERE tenantId = ? AND ctime >= ? AND ctime <= ?";

        SELECT_ACTION_HISTORY_RESULT = "SELECT tenantId, actionPlugin, actionId, alertId, ctime FROM "
                + keyspace + ".actions_history_results WHERE tenantId = ? AND result = ?";

        SELECT_ACTION_ID_ALL = "SELECT tenantId, actionPlugin, actionId "
                + "FROM " + keyspace + ".actions_definitions ";

        SELECT_ACTION_ID_BY_TENANT = "SELECT actionPlugin, actionId "
                + "FROM " + keyspace + ".actions_definitions "
                + "WHERE tenantId = ? ";

        SELECT_ACTION_PLUGIN = "SELECT properties FROM " + keyspace + ".action_plugins "
                + "WHERE actionPlugin = ? ";

        SELECT_ACTION_PLUGIN_DEFAULT_PROPERTIES = "SELECT defaultProperties FROM " + keyspace + ".action_plugins "
                + "WHERE actionPlugin = ? ";

        SELECT_ACTION_PLUGINS = "SELECT actionPlugin FROM " + keyspace + ".action_plugins";

        SELECT_ACTION_ID_BY_PLUGIN = "SELECT actionId FROM " + keyspace + ".actions_definitions "
                + "WHERE tenantId = ? AND actionPlugin = ? ";

        SELECT_ALERT = "SELECT payload FROM " + keyspace + ".alerts "
                + "WHERE tenantId = ? AND alertId = ? ";

        SELECT_ALERT_CTIME_END = "SELECT alertId FROM " + keyspace + ".alerts_ctimes "
                + "WHERE tenantId = ? AND ctime <= ? ";

        SELECT_ALERT_CTIME_START = "SELECT alertId FROM " + keyspace + ".alerts_ctimes "
                + "WHERE tenantId = ? AND ctime >= ? ";

        SELECT_ALERT_CTIME_START_END = "SELECT alertId FROM " + keyspace + ".alerts_ctimes "
                + "WHERE tenantId = ? AND ctime >= ? AND ctime <= ? ";

        SELECT_ALERT_SEVERITY = "SELECT alertId FROM " + keyspace + ".alerts_severities "
                + "WHERE tenantId = ? AND severity = ? ";

        SELECT_ALERT_STATUS = "SELECT alertId FROM " + keyspace + ".alerts_statuses "
                + "WHERE tenantId = ? AND status = ? ";

        SELECT_ALERTS_BY_TENANT = "SELECT payload FROM " + keyspace + ".alerts "
                + "WHERE tenantId = ? ";

        SELECT_ALERT_TRIGGER = "SELECT alertId FROM " + keyspace + ".alerts_triggers "
                + "WHERE tenantId = ? AND triggerId = ? ";

        SELECT_CONDITION_ID = "SELECT triggerId, triggerMode, type, conditionSetSize, "
                + "conditionSetIndex, conditionId, dataId, operator, data2Id, data2Multiplier, pattern, "
                + "ignoreCase, threshold, operatorLow, operatorHigh, thresholdLow, thresholdHigh, inRange, "
                + "direction, period, tenantId, context "
                + "FROM " + keyspace + ".conditions "
                + "WHERE tenantId = ? AND conditionId = ? ";

        SELECT_CONDITIONS_ALL = "SELECT triggerId, triggerMode, type, conditionSetSize, "
                + "conditionSetIndex, conditionId, dataId, operator, data2Id, data2Multiplier, pattern, "
                + "ignoreCase, threshold, operatorLow, operatorHigh, thresholdLow, thresholdHigh, inRange, "
                + "direction, period, tenantId, context "
                + "FROM " + keyspace + ".conditions ";

        SELECT_CONDITIONS_BY_TENANT = "SELECT triggerId, triggerMode, type, conditionSetSize, "
                + "conditionSetIndex, conditionId, dataId, operator, data2Id, data2Multiplier, pattern, "
                + "ignoreCase, threshold, operatorLow, operatorHigh, thresholdLow, thresholdHigh, inRange, "
                + "direction, period, tenantId, context "
                + "FROM " + keyspace + ".conditions "
                + "WHERE tenantId = ? ";

        SELECT_DAMPENING_ID = "SELECT triggerId, triggerMode, type, evalTrueSetting, "
                + "evalTotalSetting, evalTimeSetting, dampeningId, tenantId "
                + "FROM " + keyspace + ".dampenings "
                + "WHERE tenantId = ? AND dampeningId = ? ";

        SELECT_DAMPENINGS_ALL = "SELECT tenantId, triggerId, triggerMode, type, evalTrueSetting, "
                + "evalTotalSetting, evalTimeSetting, dampeningId "
                + "FROM " + keyspace + ".dampenings ";

        SELECT_DAMPENINGS_BY_TENANT = "SELECT tenantId, triggerId, triggerMode, type, "
                + "evalTrueSetting, "
                + "evalTotalSetting, evalTimeSetting, dampeningId "
                + "FROM " + keyspace + ".dampenings "
                + "WHERE tenantId = ? ";

        SELECT_EVENT = "SELECT payload FROM " + keyspace + ".events "
                + "WHERE tenantId = ? AND id = ? ";

        SELECT_EVENT_CATEGORY = "SELECT id FROM " + keyspace + ".events_categories "
                + "WHERE tenantId = ? AND category = ? ";

        SELECT_EVENT_CTIME_END = "SELECT id FROM " + keyspace + ".events_ctimes "
                + "WHERE tenantId = ? AND ctime <= ? ";

        SELECT_EVENT_CTIME_START = "SELECT id FROM " + keyspace + ".events_ctimes "
                + "WHERE tenantId = ? AND ctime >= ? ";

        SELECT_EVENT_CTIME_START_END = "SELECT id FROM " + keyspace + ".events_ctimes "
                + "WHERE tenantId = ? AND ctime >= ? AND ctime <= ? ";

        SELECT_EVENT_TRIGGER = "SELECT id FROM " + keyspace + ".events_triggers "
                + "WHERE tenantId = ? AND triggerId = ? ";

        //SELECT_EVENTS_BY_PARTITION = "SELECT payload FROM " + keyspace + ".events "
        //        + "WHERE tenantId = ? AND category = ? ";

        SELECT_EVENTS_BY_TENANT = "SELECT payload FROM " + keyspace + ".events "
                + "WHERE tenantId = ? ";

        // This is for use as a pre-query to gather all partitions to be subsequently queried. If the
        // partition key changes this should also change.
        // SELECT_PARTITIONS_EVENTS = "SELECT DISTINCT tenantid, category FROM " + keyspace + ".events ";

        // This is for use as a pre-query to gather all partitions to be subsequently queried. If the
        // partition key changes this should also change.
        SELECT_PARTITIONS_TRIGGERS = "SELECT DISTINCT tenantid FROM " + keyspace + ".triggers ";

        SELECT_TAGS_BY_NAME = "SELECT tenantId, value, id "
                + "FROM " + keyspace + ".tags "
                + "WHERE tenantId = ? AND type = ? and name = ? ";

        SELECT_TAGS_BY_NAME_AND_VALUE = "SELECT tenantId, id "
                + "FROM " + keyspace + ".tags "
                + "WHERE tenantId = ? AND type = ? and name = ? AND value = ? ";

        SELECT_TRIGGER = "SELECT tenantId, id, autoDisable, autoEnable, autoResolve, autoResolveAlerts, "
                + "autoResolveMatch, context, description, enabled, eventCategory, eventText, eventType, "
                + "firingMatch, memberOf, name, severity, source, tags, type "
                + "FROM " + keyspace + ".triggers "
                + "WHERE tenantId = ? AND id = ? ";

        SELECT_TRIGGER_ACTIONS = "SELECT tenantId, triggerId, actionPlugin, actionId, payload "
                + "FROM " + keyspace + ".triggers_actions "
                + "WHERE tenantId = ? AND triggerId = ? ";

        SELECT_TRIGGER_CONDITIONS = "SELECT triggerId, triggerMode, type, conditionSetSize, "
                + "conditionSetIndex, conditionId, dataId, operator, data2Id, data2Multiplier, pattern, "
                + "ignoreCase, threshold, operatorLow, operatorHigh, thresholdLow, thresholdHigh, inRange, "
                + "direction, period, tenantId, context "
                + "FROM " + keyspace + ".conditions "
                + "WHERE tenantId = ? AND triggerId = ?";

        SELECT_TRIGGER_CONDITIONS_TRIGGER_MODE = "SELECT triggerId, triggerMode, type, conditionSetSize, "
                + "conditionSetIndex, conditionId, dataId, operator, data2Id, data2Multiplier, pattern, ignoreCase, "
                + "threshold, operatorLow, operatorHigh, thresholdLow, thresholdHigh, inRange, "
                + "direction, period, tenantId, context "
                + "FROM " + keyspace + ".conditions "
                + "WHERE tenantId = ? AND triggerId = ? AND triggerMode = ? ";

        SELECT_TRIGGER_DAMPENINGS = "SELECT tenantId, triggerId, triggerMode, type, "
                + "evalTrueSetting, evalTotalSetting, evalTimeSetting, dampeningId "
                + "FROM " + keyspace + ".dampenings "
                + "WHERE tenantId = ? AND triggerId = ? ";

        SELECT_TRIGGER_DAMPENINGS_MODE = "SELECT tenantId, triggerId, triggerMode, type, "
                + "evalTrueSetting, evalTotalSetting, evalTimeSetting, dampeningId "
                + "FROM " + keyspace + ".dampenings "
                + "WHERE tenantId = ? AND triggerId = ? and triggerMode = ? ";

        SELECT_TRIGGERS_ALL = "SELECT tenantId, id, autoDisable, autoEnable, autoResolve, autoResolveAlerts, "
                + "autoResolveMatch, context, description, enabled, eventCategory, eventText, eventType, "
                + "firingMatch, memberOf, name, severity, source, tags, type "
                + "FROM " + keyspace + ".triggers ";

        SELECT_TRIGGERS_TENANT = "SELECT tenantId, id, autoDisable, autoEnable, autoResolve, autoResolveAlerts, "
                + "autoResolveMatch, context, description, enabled, eventCategory, eventText, eventType, "
                + "firingMatch, memberOf, name, severity, source, tags, type "
                + "FROM " + keyspace + ".triggers WHERE tenantId = ? ";

        UPDATE_ACTION_DEFINITION = "UPDATE " + keyspace + ".actions_definitions SET payload = ? "
                + "WHERE tenantId = ? AND actionPlugin = ? AND actionId = ? ";

        UPDATE_ACTION_HISTORY = "UPDATE " + keyspace + ".actions_history "
                + "SET payload = ? "
                + "WHERE tenantId = ? AND actionPlugin = ? AND actionId = ? AND alertId = ? AND ctime = ?";

        UPDATE_ACTION_PLUGIN = "UPDATE " + keyspace + ".action_plugins SET properties = ? WHERE actionPlugin = ? ";

        UPDATE_ACTION_PLUGIN_DEFAULT_PROPERTIES = "UPDATE " + keyspace + ".action_plugins "
                + "SET properties = ?, defaultProperties = ? WHERE actionPlugin = ? ";

        UPDATE_ALERT = "UPDATE " + keyspace + ".alerts SET payload = ? WHERE tenantId = ? AND alertId = ? ";

        UPDATE_DAMPENING_ID = "UPDATE " + keyspace + ".dampenings "
                + "SET type = ?, evalTrueSetting = ?, evalTotalSetting = ?, evalTimeSetting = ? "
                + "WHERE tenantId = ? AND triggerId = ? AND triggerMode = ? AND dampeningId = ? ";

        UPDATE_EVENT = "UPDATE " + keyspace + ".events SET payload = ? WHERE tenantId = ? AND id = ? ";

        // NOTE(review): unlike INSERT_TRIGGER, this SET list omits eventType — presumably it is
        // immutable after creation; confirm against the service layer.
        UPDATE_TRIGGER = "UPDATE " + keyspace + ".triggers "
                + "SET autoDisable = ?, autoEnable = ?, autoResolve = ?, autoResolveAlerts = ?, autoResolveMatch = ?, "
                + "context = ?, description = ?, enabled = ?, eventCategory = ?, eventText = ?, firingMatch = ?, "
                + "memberOf = ?, name = ?, severity = ?, source = ?, tags = ?, type = ? "
                + "WHERE tenantId = ? AND id = ? ";

        UPDATE_TRIGGER_ENABLED = "UPDATE " + keyspace + ".triggers "
                + "SET enabled = ? WHERE tenantId = ? AND id = ? ";
    }

    /**
     * Returns the cached {@link PreparedStatement} for the given CQL text, preparing and caching
     * it on first use. The method is synchronized so two threads never prepare the same statement
     * concurrently; note the class-wide lock is held across session.prepare() on a cache miss.
     */
    public static synchronized PreparedStatement get(Session session, String statement) {
        PreparedStatement result = statementMap.get(statement);
        if (null == result) {
            result = session.prepare(statement);
            statementMap.put(statement, result);
        }
        return result;
    }
}
package be.jabapage.controllers;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.verify;

import java.util.ArrayList;
import java.util.List;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

import be.jabapage.MockModule;
import be.jabapage.exceptions.PartyNotInListException;
import be.jabapage.services.api.IElectionPredictionService;
import be.jabapage.vos.ElectionPartyVo;
import be.jabapage.vos.ElectionPredictionConfigurationVo;
import be.jabapage.vos.ElectionPredictionVo;
import be.jabapage.vos.OnChangeListener;

/**
 * Unit tests for {@code ElectionPredictionController}. Each message code is exercised three
 * ways: with no argument (expects NPE), with a wrongly-typed argument (expects IAE), and with
 * a valid argument (asserts the model change and the listener notification).
 */
@RunWith(MockitoJUnitRunner.class)
@Deprecated
public class ElectionPredictionControllerTest {

    private ElectionPredictionController controller;
    private ElectionPredictionVo model;
    @Mock
    private ElectionPredictionConfigurationVo configuration;
    private MockModule mockModule = new MockModule();
    @Mock
    private OnChangeListener<ElectionPredictionVo> listener;

    /*
     * Controller message codes under test (values defined on the controller):
     * UPDATE_CONFIGURATION = 1, PREDICT = 2, REFRESH = 3, ADD_PARTY = 4,
     * REMOVE_PARTY = 5, UPDATE_AMOUNT_OF_PLACES = 6, UPDATE_PARTY = 7
     */

    @Before
    public void setUp() throws Exception {
        model = new ElectionPredictionVo();
        controller = new ElectionPredictionController(model, mockModule);
        model.addListener(listener);
    }

    /** Asserts the listener fired exactly once with the test model and returns what it received. */
    private ElectionPredictionVo capturedModel() {
        ArgumentCaptor<ElectionPredictionVo> captor = ArgumentCaptor.forClass(ElectionPredictionVo.class);
        verify(listener).onChange(captor.capture());
        assertEquals(model, captor.getValue());
        return captor.getValue();
    }

    @Test(expected = NullPointerException.class)
    public void testHandleMessageSetConfigurationNullArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.SET_CONFIGURATION);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHandleMessageSetConfigurationWrongArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.SET_CONFIGURATION, "wrong argument");
    }

    @Test
    public void testHandleMessageSetConfiguration() throws Exception {
        controller.handleMessage(ElectionPredictionController.SET_CONFIGURATION, configuration);

        ElectionPredictionVo notified = capturedModel();
        assertEquals(configuration, notified.getConfiguration());
    }

    @Test(expected = NullPointerException.class)
    public void testHandleMessageUpdateListPredefinedPartiesNullArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.UPDATE_LIST_PREDEFINED_PARTIES);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHandleMessageUpdateListPredefinedPartiesWrongArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.UPDATE_LIST_PREDEFINED_PARTIES, "wrong argument");
    }

    @Test
    public void testHandleMessageUpdateListPredefinedParties() throws Exception {
        List<String> expectedNames = new ArrayList<String>();
        expectedNames.add("partyname");

        controller.handleMessage(ElectionPredictionController.UPDATE_LIST_PREDEFINED_PARTIES, expectedNames);

        ElectionPredictionVo notified = capturedModel();
        assertNotNull(notified.getConfiguration());
        assertEquals(expectedNames, notified.getConfiguration().getPreconfiguredPartyNames());
    }

    @Test(expected = NullPointerException.class)
    public void testHandleMessageAddPredefinedPartyNullArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.ADD_PREDEFINED_PARTY);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHandleMessageAddPredefinedPartyWrongArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.ADD_PREDEFINED_PARTY, Integer.valueOf(1));
    }

    @Test
    public void testHandleMessageAddPredefinedParty() throws Exception {
        String partyName = "partyName";

        controller.handleMessage(ElectionPredictionController.ADD_PREDEFINED_PARTY, partyName);

        ElectionPredictionConfigurationVo cfg = capturedModel().getConfiguration();
        assertNotNull(cfg);
        assertNotNull(cfg.getPreconfiguredPartyNames());
        assertTrue(cfg.getPreconfiguredPartyNames().contains(partyName));
    }

    @Test(expected = NullPointerException.class)
    public void testHandleMessageRemovePredefinedPartyNullArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.REMOVE_PREDEFINED_PARTY);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHandleMessageRemovePredefinedPartyWrongArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.REMOVE_PREDEFINED_PARTY, Integer.valueOf(1));
    }

    @Test
    public void testHandleMessageRemovePredefinedParty() throws Exception {
        String partyName = "partyName";
        model.getConfiguration().addPreConfiguredPartyName(partyName);

        controller.handleMessage(ElectionPredictionController.REMOVE_PREDEFINED_PARTY, partyName);

        ElectionPredictionConfigurationVo cfg = capturedModel().getConfiguration();
        assertNotNull(cfg);
        assertNotNull(cfg.getPreconfiguredPartyNames());
        assertFalse(cfg.getPreconfiguredPartyNames().contains(partyName));
    }

    @Test
    public void testHandleMessageRefresh() throws Exception {
        controller.handleMessage(ElectionPredictionController.REFRESH);
        verify(listener).onChange(model);
    }

    @Test(expected = NullPointerException.class)
    public void testHandleMessageAddPartyNullArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.ADD_PARTY);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHandleMessageAddPartyWrongArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.ADD_PARTY, "wrong argument");
    }

    @Test
    public void testHandleMessageAddPartyAlreadyExists() throws Exception {
        ElectionPartyVo duplicate = new ElectionPartyVo();
        duplicate.setName("partyName");

        // Adding the same party twice currently results in two entries.
        controller.handleMessage(ElectionPredictionController.ADD_PARTY, duplicate);
        controller.handleMessage(ElectionPredictionController.ADD_PARTY, duplicate);

        assertEquals(2, model.getParties().size());
    }

    @Test
    public void testHandleMessageAddParty() throws Exception {
        ElectionPartyVo newParty = new ElectionPartyVo();
        newParty.setName("partyName");

        controller.handleMessage(ElectionPredictionController.ADD_PARTY, newParty);

        List<ElectionPartyVo> partyList = capturedModel().getParties();
        assertNotNull(partyList);
        assertTrue(partyList.contains(newParty));
    }

    @Test(expected = NullPointerException.class)
    public void testHandleMessageRemovePartyNullArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.REMOVE_PARTY);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHandleMessageRemovePartyWrongArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.REMOVE_PARTY, "wrong argument");
    }

    @Test
    public void testHandleMessageRemoveParty() throws Exception {
        ElectionPartyVo existing = new ElectionPartyVo();
        existing.setName("partyName");
        model.getParties().add(existing);

        controller.handleMessage(ElectionPredictionController.REMOVE_PARTY, existing);

        List<ElectionPartyVo> partyList = capturedModel().getParties();
        assertNotNull(partyList);
        assertFalse(partyList.contains(existing));
    }

    @Test(expected = NullPointerException.class)
    public void testHandleMessageUpdateAmountOfPlacesNullArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.UPDATE_AMOUNT_OF_PLACES);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHandleMessageUpdateAmountOfPlacesWrongArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.UPDATE_AMOUNT_OF_PLACES, "wrong argument");
    }

    @Test
    public void testHandleMessageUpdateAmountOfPlaces() throws Exception {
        int places = 21;

        controller.handleMessage(ElectionPredictionController.UPDATE_AMOUNT_OF_PLACES, places);

        capturedModel();
        assertEquals(places, model.getAmountOfPlaces());
    }

    @Test(expected = NullPointerException.class)
    public void testHandleMessageUpdatePartyNullArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.UPDATE_PARTY);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHandleMessageUpdatePartyWrongArgument() throws Exception {
        controller.handleMessage(ElectionPredictionController.UPDATE_PARTY, "wrong argument");
    }

    @Test(expected = PartyNotInListException.class)
    public void testHandleMessageUpdatePartyNotExisting() throws Exception {
        ElectionPartyVo unknown = new ElectionPartyVo();
        unknown.setName("partyName");
        controller.handleMessage(ElectionPredictionController.UPDATE_PARTY, unknown);
    }

    @Test
    public void testHandleMessageUpdateParty() throws Exception {
        String partyName = "partyName";
        ElectionPartyVo original = new ElectionPartyVo();
        original.setName(partyName);
        model.getParties().add(original);

        ElectionPartyVo update = new ElectionPartyVo();
        update.setName(partyName);
        update.setNumberOfVotes(50);

        controller.handleMessage(ElectionPredictionController.UPDATE_PARTY, update);

        List<ElectionPartyVo> partyList = capturedModel().getParties();
        assertNotNull(partyList);
        assertTrue(partyList.contains(original));
        //TODO check the update itself.
    }

    @Test
    public void testHandleMessagePredict() throws Exception {
        IElectionPredictionService service = mockModule.getPredictionService();

        controller.handleMessage(ElectionPredictionController.PREDICT);

        verify(service).predictPlaces(model);
        verify(listener).onChange(model);
    }

    @Test(expected = NullPointerException.class)
    public void testElectionPredictionControllerNullModel() {
        new ElectionPredictionController(null, mockModule);
    }

    @Test(expected = NullPointerException.class)
    public void testElectionPredictionControllerNullModule() {
        new ElectionPredictionController(model, null);
    }
}
package com.alexfiliakov.blackscholescalc.OptionPriceModel;

// NOTE(review): several of the imports below (javafx.*, javax.management.*,
// com.sun.*, sun.net.*, sun.util.*) appear unused, and sun.swing.table.
// DefaultTableCellHeaderRenderer is a JDK-internal API that may disappear in
// newer JDKs.  Left in place pending a dedicated cleanup pass.
import javafx.scene.control.TableColumn;

import javax.management.monitor.Monitor;
import javax.swing.JLabel;
import javax.swing.JTable;
import javax.swing.SwingConstants;
import javax.swing.UIManager;
import javax.swing.border.CompoundBorder;
import javax.swing.border.EmptyBorder;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableModel;

import org.apache.commons.math3.distribution.NormalDistribution;

import com.alexfiliakov.blackscholescalc.StockModel.StockModel;
import com.sun.xml.internal.ws.api.Component;

import sun.net.www.content.text.plain;
import sun.swing.table.DefaultTableCellHeaderRenderer;
import sun.util.spi.CalendarProvider;

import java.awt.Color;
import java.awt.Font;
import java.awt.Insets;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import java.util.StringTokenizer;
import java.math.RoundingMode;
import java.text.DecimalFormat;

/**
 * Builds a Swing {@link TableModel} of Black-Scholes call/put option prices
 * for an underlying stock, over a grid of expiration dates (8 weekly + 8
 * quarterly Fridays) and strike prices (-3..+3 weekly standard deviations
 * around the spot price).
 *
 * <p>The constructor performs network I/O (Yahoo Finance quote API) and may
 * therefore block or throw; construct off the EDT.
 */
public class OptionPriceTableModel {

    /** Column headers of the generated price table. */
    final private static String[] COLUMN_NAMES = {"Expiration", "Strike", "Call Price", "Put Price"};

    /** Approximate trading days per year; converts business-day counts to year fractions. */
    final private static int TRADING_DAYS_IN_YEAR = 252;

    /**
     * Shared standard normal distribution used for N(d1)/N(d2).
     * {@code cumulativeProbability} is a pure function, so one shared instance
     * is safe; previously a new distribution was allocated on every pricing call.
     */
    private static final NormalDistribution STANDARD_NORMAL = new NormalDistribution();

    private TableModel tableModel; // holds option prices

    /**
     * Creates the price table data for the given stock.
     *
     * @param stockModel source of the stock symbol and its annualized volatility
     * @throws Exception if quote or rate data cannot be retrieved
     */
    public OptionPriceTableModel(StockModel stockModel) throws Exception {
        // Black-Scholes is calculated in years; keep every time quantity in years.
        double riskFreeRate = getRiskFreeRate();
        // continuously-compounded force of interest from the annual percent rate
        double riskFreeForce = Math.log(1 + riskFreeRate / 100);
        double spotPrice = getSpotPrice(stockModel.getStockSymbol()); // last trade price of the underlying
        double annualVolatility = stockModel.getAnnualVolatility();
        Calendar[] expirationDays = getExpirationDays(); // list of expiration days to calculate
        SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy");

        // Strike prices at -3..+3 weekly standard deviations of log-returns around spot
        // (annual volatility scaled by sqrt(1/52) to a weekly deviation).
        double[] strikePricesArr = new double[7];
        for (int i = -3; i <= 3; i++) {
            strikePricesArr[i + 3] = spotPrice * Math.exp(i * annualVolatility / Math.sqrt(52));
        }

        // FIX: formats were re-created and re-configured on every inner-loop pass;
        // hoisted to two pre-configured instances (2 decimals for strikes, 4 for prices).
        DecimalFormat strikeFormat = new DecimalFormat();
        strikeFormat.setRoundingMode(RoundingMode.HALF_EVEN);
        strikeFormat.setMaximumFractionDigits(2);
        strikeFormat.setMinimumFractionDigits(2);
        DecimalFormat priceFormat = new DecimalFormat();
        priceFormat.setRoundingMode(RoundingMode.HALF_EVEN);
        priceFormat.setMaximumFractionDigits(4);
        priceFormat.setMinimumFractionDigits(4);

        String[][] rowData = new String[expirationDays.length * strikePricesArr.length][COLUMN_NAMES.length];
        for (int expDayIndex = 0; expDayIndex < expirationDays.length; expDayIndex++) {
            // FIX: time-to-expiry is invariant in the strike loop; computed once per expiration.
            double businessYearsToDate =
                    ((double) businessDaysToDate(expirationDays[expDayIndex])) / TRADING_DAYS_IN_YEAR;
            for (int strikePriceIndex = 0; strikePriceIndex < strikePricesArr.length; strikePriceIndex++) {
                // index of the data row (multiple strike prices per expiration day)
                int rowDataIndex = expDayIndex * strikePricesArr.length + strikePriceIndex;
                rowData[rowDataIndex][0] = sdf.format(expirationDays[expDayIndex].getTime());
                rowData[rowDataIndex][1] = strikeFormat.format(strikePricesArr[strikePriceIndex]) + " ";

                double callOptionPrice = getCallOptionPrice(spotPrice, strikePricesArr[strikePriceIndex],
                        riskFreeForce, annualVolatility, businessYearsToDate);
                double putOptionPrice = getPutOptionPrice(spotPrice, strikePricesArr[strikePriceIndex],
                        riskFreeForce, annualVolatility, businessYearsToDate);
                rowData[rowDataIndex][2] = priceFormat.format(callOptionPrice) + " ";
                rowData[rowDataIndex][3] = priceFormat.format(putOptionPrice) + " ";
            }
        }
        tableModel = new DefaultTableModel(rowData, COLUMN_NAMES);
    }

    /** @return the populated table model of option prices */
    public TableModel getTableModel() {
        return tableModel;
    }

    /*
     *
     * Helper Methods
     *
     */

    /**
     * Fetches the last trade price (spot price) via the Yahoo quotes CSV API
     * ({@code f=l1} requests the Last Trade price as a single CSV cell).
     * Ref: https://greenido.wordpress.com/2009/12/22/yahoo-finance-hidden-api/
     *
     * @param stockSymbol ticker symbol to quote
     * @return the last trade price
     * @throws Exception if the request fails or returns no data
     */
    private double getSpotPrice(String stockSymbol) throws Exception {
        try {
            URL url = new URL("http://finance.yahoo.com/d/quotes.csv?s=" + stockSymbol + "&f=l1");
            URLConnection urlConn = url.openConnection();
            // FIX: try-with-resources guarantees both readers are closed on every
            // path (the old finally block was skipped by non-IO runtime failures
            // between open and close of the second reader).
            try (InputStreamReader inStream = new InputStreamReader(urlConn.getInputStream());
                 BufferedReader br = new BufferedReader(inStream)) {
                String line = br.readLine();
                if (line == null) {
                    // FIX: previously a null line surfaced as a bare NullPointerException
                    throw new Exception("Problem retrieving stock data");
                }
                return Double.parseDouble(line);
            }
        } catch (MalformedURLException e) {
            // FIX: preserve the cause instead of discarding it
            throw new Exception("Problem retrieving stock data", e);
        } catch (IOException e) {
            throw new Exception("Problem retrieving stock data", e);
        }
    }

    /**
     * Approximates the risk-free rate from the 10-yr Treasury Note yield
     * ({@code ^TNX}), less a 1% historical risk premium.
     * Ref: http://www.google.com/url?sa=t&rct=j&q=&esrc=s&source=web&cd=1&cad=rja&uact=8&ved=0CCEQFjAA&url=http%3A%2F%2Fsom.yale.edu%2F~spiegel%2Fmgt541%2FLectures%2FEstimatingCAPMInputs.ppt&ei=AvSRVYS4MImt-AHd7LjgBg&usg=AFQjCNFSKQUQmq8wGdCUfXUgX8YYyPX6mQ&sig2=11oTNuIAtqBSYVRk7iktqQ&bvm=bv.96783405,d.cWw
     *
     * @return risk-free rate in percent (e.g. 1.35 for 1.35%)
     * @throws Exception if the rate quote cannot be retrieved
     */
    private double getRiskFreeRate() throws Exception {
        double tenYearTNotePrice = getSpotPrice("^TNX"); // get the 10-yr T-Note yield quote
        return tenYearTNotePrice - 1.;
    }

    /**
     * Builds the list of expiration Fridays: the next 8 weekly Fridays,
     * followed by 8 quarterly (13-week-spaced) Fridays.
     *
     * @return expiration dates in ascending order
     */
    private Calendar[] getExpirationDays() {
        ArrayList<Calendar> expirationDaysList = new ArrayList<Calendar>();
        Calendar today = Calendar.getInstance();
        today.setFirstDayOfWeek(Calendar.MONDAY);

        // need to get today's close day (weekend = Friday, otherwise previous day)
        Calendar todayClosingDay = (Calendar) today.clone();
        // todo: before 4pm EST, closing is still today

        // advance to the next Friday (a full week ahead when today is Friday)
        Calendar expirationDayToAdd = (Calendar) todayClosingDay.clone();
        switch (todayClosingDay.get(Calendar.DAY_OF_WEEK)) {
            case Calendar.MONDAY:
                expirationDayToAdd.add(Calendar.DATE, 4);
                break;
            case Calendar.TUESDAY:
                expirationDayToAdd.add(Calendar.DATE, 3);
                break;
            case Calendar.WEDNESDAY:
                expirationDayToAdd.add(Calendar.DATE, 2);
                break;
            case Calendar.THURSDAY:
                expirationDayToAdd.add(Calendar.DATE, 1);
                break;
            case Calendar.FRIDAY:
                expirationDayToAdd.add(Calendar.DATE, 7);
                break;
            case Calendar.SATURDAY:
                expirationDayToAdd.add(Calendar.DATE, 6);
                break;
            case Calendar.SUNDAY:
                expirationDayToAdd.add(Calendar.DATE, 5);
                break;
        }

        // add 8 weekly expirations
        expirationDaysList.add((Calendar) expirationDayToAdd.clone());
        for (int i = 0; i < 7; i++) {
            expirationDayToAdd.add(Calendar.DATE, 7);
            expirationDaysList.add((Calendar) expirationDayToAdd.clone());
        }

        // add 8 quarterly expirations (13-week periods); the first jump covers the
        // remaining (13 - 8) weeks from the last weekly expiration
        expirationDayToAdd.add(Calendar.DATE, (52 / 4 - 8) * 7);
        expirationDaysList.add((Calendar) expirationDayToAdd.clone());
        for (int i = 0; i < 7; i++) {
            expirationDayToAdd.add(Calendar.DATE, 52 / 4 * 7);
            expirationDaysList.add((Calendar) expirationDayToAdd.clone());
        }

        Calendar[] expirationDaysArr = new Calendar[expirationDaysList.size()];
        expirationDaysArr = expirationDaysList.toArray(expirationDaysArr);
        return expirationDaysArr;
    }

    /**
     * Counts business days (Mon-Fri) from tomorrow through {@code date} inclusive.
     * TODO: account for market holidays (or rely on TRADING_DAYS_IN_YEAR as approx).
     *
     * @param date target date (exclusive of today)
     * @return number of weekdays until close of {@code date}
     */
    private int businessDaysToDate(Calendar date) {
        int businessDayCount = 0;
        Calendar today = Calendar.getInstance();
        while (date.getTimeInMillis() - today.getTimeInMillis() > 0) {
            today.add(Calendar.DATE, 1);
            switch (today.get(Calendar.DAY_OF_WEEK)) {
                case Calendar.SATURDAY:
                case Calendar.SUNDAY:
                    break;
                default:
                    businessDayCount++; // only increment on weekdays
                    break;
            }
        }
        return businessDayCount;
    }

    /**
     * Black-Scholes European call price:
     * C = N(d1)*S - N(d2)*K*exp(-r*t).
     *
     * @param spotPrice        current price of the underlying (S)
     * @param strikePrice      option strike (K)
     * @param riskFreeForce    continuously-compounded risk-free rate (r)
     * @param annualVolatility annualized volatility (sigma)
     * @param timeInYears      time to expiry in years (t)
     * @return call option price
     */
    private double getCallOptionPrice(double spotPrice, double strikePrice, double riskFreeForce,
            double annualVolatility, double timeInYears) {
        double d1 = (Math.log(spotPrice / strikePrice)
                + (riskFreeForce + annualVolatility * annualVolatility / 2) * timeInYears);
        d1 /= annualVolatility * Math.sqrt(timeInYears);
        double d2 = d1 - annualVolatility * Math.sqrt(timeInYears);
        double Nd1 = STANDARD_NORMAL.cumulativeProbability(d1);
        double Nd2 = STANDARD_NORMAL.cumulativeProbability(d2);
        return Nd1 * spotPrice - Nd2 * strikePrice * Math.exp(-riskFreeForce * timeInYears);
    }

    /**
     * Derives the put price from the matching call via put-call parity:
     * P = C - S + K*exp(-r*t).
     *
     * @return put option price for the same strike/expiry as the call
     */
    private double getPutOptionPrice(double spotPrice, double strikePrice, double riskFreeForce,
            double annualVolatility, double timeInYears) {
        double callOptionPrice =
                getCallOptionPrice(spotPrice, strikePrice, riskFreeForce, annualVolatility, timeInYears);
        return callOptionPrice - spotPrice + strikePrice * Math.exp(-riskFreeForce * timeInYears);
    }

    /**
     * Applies renderers and fonts to a JTable displaying this model:
     * centered bold headers, right-aligned numeric columns, centered dates.
     * Must be called on the EDT.
     *
     * @param table the table to format
     */
    public static void formatTable(JTable table) {
        // NOTE(review): DefaultTableCellHeaderRenderer is a sun.swing internal API.
        DefaultTableCellHeaderRenderer headerRenderer = new DefaultTableCellHeaderRenderer();
        headerRenderer.setHorizontalAlignment(SwingConstants.CENTER);
        DefaultTableCellRenderer centerRenderer = new DefaultTableCellRenderer();
        centerRenderer.setHorizontalAlignment(SwingConstants.CENTER);
        DefaultTableCellRenderer rightRenderer = new DefaultTableCellRenderer();
        rightRenderer.setHorizontalAlignment(SwingConstants.RIGHT);
        for (int i = 0; i < COLUMN_NAMES.length; i++) {
            table.getColumnModel().getColumn(i).setHeaderRenderer(headerRenderer);
            table.getColumnModel().getColumn(i).setCellRenderer(rightRenderer);
        }
        // the Expiration column shows dates and is centered instead
        table.getColumnModel().getColumn(0).setCellRenderer(centerRenderer);
        table.getTableHeader().setFont(new Font("SansSerif", Font.BOLD, 14));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/**
 * Autogenerated by Thrift Compiler (0.9.0)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 *
 * NOTE(review): one deliberate deviation from the generated output — see
 * hashCode(), which originally returned a constant 0.  If this file is ever
 * regenerated, that fix must be re-applied (or the generator configured to
 * emit a real hash).
 */
package org.apache.accumulo.core.data.thrift;

import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Thrift struct returned when a scan is first started: the server-assigned
 * scan ID (used to continue/close the scan) plus the first batch of results.
 */
@SuppressWarnings("all") public class InitialScan implements org.apache.thrift.TBase<InitialScan, InitialScan._Fields>, java.io.Serializable, Cloneable {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InitialScan");

  private static final org.apache.thrift.protocol.TField SCAN_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("scanID", org.apache.thrift.protocol.TType.I64, (short)1);
  private static final org.apache.thrift.protocol.TField RESULT_FIELD_DESC = new org.apache.thrift.protocol.TField("result", org.apache.thrift.protocol.TType.STRUCT, (short)2);

  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new InitialScanStandardSchemeFactory());
    schemes.put(TupleScheme.class, new InitialScanTupleSchemeFactory());
  }

  public long scanID; // required
  public ScanResult result; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  @SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SCAN_ID((short)1, "scanID"),
    RESULT((short)2, "result");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // SCAN_ID
          return SCAN_ID;
        case 2: // RESULT
          return RESULT;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  private static final int __SCANID_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SCAN_ID, new org.apache.thrift.meta_data.FieldMetaData("scanID", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64        , "ScanID")));
    tmpMap.put(_Fields.RESULT, new org.apache.thrift.meta_data.FieldMetaData("result", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ScanResult.class)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(InitialScan.class, metaDataMap);
  }

  public InitialScan() {
  }

  public InitialScan(
    long scanID,
    ScanResult result)
  {
    this();
    this.scanID = scanID;
    setScanIDIsSet(true);
    this.result = result;
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public InitialScan(InitialScan other) {
    __isset_bitfield = other.__isset_bitfield;
    this.scanID = other.scanID;
    if (other.isSetResult()) {
      this.result = new ScanResult(other.result);
    }
  }

  public InitialScan deepCopy() {
    return new InitialScan(this);
  }

  @Override
  public void clear() {
    setScanIDIsSet(false);
    this.scanID = 0;
    this.result = null;
  }

  public long getScanID() {
    return this.scanID;
  }

  public InitialScan setScanID(long scanID) {
    this.scanID = scanID;
    setScanIDIsSet(true);
    return this;
  }

  public void unsetScanID() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SCANID_ISSET_ID);
  }

  /** Returns true if field scanID is set (has been assigned a value) and false otherwise */
  public boolean isSetScanID() {
    return EncodingUtils.testBit(__isset_bitfield, __SCANID_ISSET_ID);
  }

  public void setScanIDIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SCANID_ISSET_ID, value);
  }

  public ScanResult getResult() {
    return this.result;
  }

  public InitialScan setResult(ScanResult result) {
    this.result = result;
    return this;
  }

  public void unsetResult() {
    this.result = null;
  }

  /** Returns true if field result is set (has been assigned a value) and false otherwise */
  public boolean isSetResult() {
    return this.result != null;
  }

  public void setResultIsSet(boolean value) {
    if (!value) {
      this.result = null;
    }
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case SCAN_ID:
      if (value == null) {
        unsetScanID();
      } else {
        setScanID((Long)value);
      }
      break;

    case RESULT:
      if (value == null) {
        unsetResult();
      } else {
        setResult((ScanResult)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case SCAN_ID:
      return Long.valueOf(getScanID());

    case RESULT:
      return getResult();

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case SCAN_ID:
      return isSetScanID();
    case RESULT:
      return isSetResult();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof InitialScan)
      return this.equals((InitialScan)that);
    return false;
  }

  public boolean equals(InitialScan that) {
    if (that == null)
      return false;

    boolean this_present_scanID = true;
    boolean that_present_scanID = true;
    if (this_present_scanID || that_present_scanID) {
      if (!(this_present_scanID && that_present_scanID))
        return false;
      if (this.scanID != that.scanID)
        return false;
    }

    boolean this_present_result = true && this.isSetResult();
    boolean that_present_result = true && that.isSetResult();
    if (this_present_result || that_present_result) {
      if (!(this_present_result && that_present_result))
        return false;
      if (!this.result.equals(that.result))
        return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    // FIX: the generated code returned a constant 0, which satisfies the
    // hashCode contract but collapses every HashMap/HashSet of InitialScan
    // into a single bucket (O(n) lookups).  Hash exactly the fields that
    // equals() compares: scanID always, result only when set — so equal
    // objects still produce equal hashes.
    int hash = 8191;
    hash = hash * 31 + (int) (scanID ^ (scanID >>> 32));
    hash = hash * 31 + (isSetResult() ? result.hashCode() : 0);
    return hash;
  }

  public int compareTo(InitialScan other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;
    InitialScan typedOther = (InitialScan)other;

    lastComparison = Boolean.valueOf(isSetScanID()).compareTo(typedOther.isSetScanID());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetScanID()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.scanID, typedOther.scanID);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetResult()).compareTo(typedOther.isSetResult());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetResult()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.result, typedOther.result);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("InitialScan(");
    boolean first = true;

    sb.append("scanID:");
    sb.append(this.scanID);
    first = false;
    if (!first) sb.append(", ");
    sb.append("result:");
    if (this.result == null) {
      sb.append("null");
    } else {
      sb.append(this.result);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
    if (result != null) {
      result.validate();
    }
  }

  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class InitialScanStandardSchemeFactory implements SchemeFactory {
    public InitialScanStandardScheme getScheme() {
      return new InitialScanStandardScheme();
    }
  }

  private static class InitialScanStandardScheme extends StandardScheme<InitialScan> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, InitialScan struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // SCAN_ID
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.scanID = iprot.readI64();
              struct.setScanIDIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // RESULT
            if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
              struct.result = new ScanResult();
              struct.result.read(iprot);
              struct.setResultIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, InitialScan struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(SCAN_ID_FIELD_DESC);
      oprot.writeI64(struct.scanID);
      oprot.writeFieldEnd();
      if (struct.result != null) {
        oprot.writeFieldBegin(RESULT_FIELD_DESC);
        struct.result.write(oprot);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class InitialScanTupleSchemeFactory implements SchemeFactory {
    public InitialScanTupleScheme getScheme() {
      return new InitialScanTupleScheme();
    }
  }

  private static class InitialScanTupleScheme extends TupleScheme<InitialScan> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, InitialScan struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      BitSet optionals = new BitSet();
      if (struct.isSetScanID()) {
        optionals.set(0);
      }
      if (struct.isSetResult()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetScanID()) {
        oprot.writeI64(struct.scanID);
      }
      if (struct.isSetResult()) {
        struct.result.write(oprot);
      }
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, InitialScan struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.scanID = iprot.readI64();
        struct.setScanIDIsSet(true);
      }
      if (incoming.get(1)) {
        struct.result = new ScanResult();
        struct.result.read(iprot);
        struct.setResultIsSet(true);
      }
    }
  }

}
package org.contikios.cooja.serialsocket; /* * Copyright (c) 2014, TU Braunschweig. * Copyright (c) 2010, Swedish Institute of Computer Science. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the Institute nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
* */ import java.awt.BorderLayout; import java.awt.Color; import java.awt.Dimension; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.GridLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.net.ServerSocket; import java.net.Socket; import java.text.NumberFormat; import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Observable; import java.util.Observer; import java.util.logging.Level; import javax.swing.BorderFactory; import javax.swing.BoxLayout; import javax.swing.JButton; import javax.swing.JFormattedTextField; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JSeparator; import javax.swing.SwingUtilities; import javax.swing.Timer; import javax.swing.border.EtchedBorder; import javax.swing.text.NumberFormatter; import org.apache.log4j.Logger; import org.jdom.Element; import org.contikios.cooja.ClassDescription; import org.contikios.cooja.Cooja; import org.contikios.cooja.Mote; import org.contikios.cooja.MotePlugin; import org.contikios.cooja.PluginType; import org.contikios.cooja.Simulation; import org.contikios.cooja.VisPlugin; import org.contikios.cooja.interfaces.SerialPort; /** * Socket to simulated serial port forwarder. Server version. 
* * @author Fredrik Osterlind * @author Enrico Jorns */ @ClassDescription("Serial Socket (SERVER)") @PluginType(PluginType.MOTE_PLUGIN) public class SerialSocketServer extends VisPlugin implements MotePlugin { private static final long serialVersionUID = 1L; private static final Logger logger = Logger.getLogger(SerialSocketServer.class); private final static int STATUSBAR_WIDTH = 350; private static final Color COLOR_NEUTRAL = Color.DARK_GRAY; private static final Color COLOR_POSITIVE = new Color(0, 161, 83); private static final Color COLOR_NEGATIVE = Color.RED; private final int SERVER_DEFAULT_PORT; private final SerialPort serialPort; private Observer serialDataObserver; private JLabel socketToMoteLabel; private JLabel moteToSocketLabel; private JLabel socketStatusLabel; private JFormattedTextField listenPortField; private JButton serverStartButton; private int inBytes = 0, outBytes = 0; private ServerSocket serverSocket; private Socket clientSocket; private Mote mote; private Simulation simulation; public SerialSocketServer(Mote mote, Simulation simulation, final Cooja gui) { super("Serial Socket (SERVER) (" + mote + ")", gui, false); this.mote = mote; this.simulation = simulation; updateTimer.start(); SERVER_DEFAULT_PORT = 60000 + mote.getID(); /* GUI components */ if (Cooja.isVisualized()) { setResizable(false); setLayout(new BorderLayout()); // --- Server Port setup GridBagConstraints c = new GridBagConstraints(); JPanel socketPanel = new JPanel(new GridBagLayout()); socketPanel.setBorder(BorderFactory.createEmptyBorder(2, 2, 2, 2)); JLabel label = new JLabel("Listen port: "); c.gridx = 0; c.gridy = 0; c.weightx = 0.1; c.anchor = GridBagConstraints.EAST; socketPanel.add(label, c); NumberFormat nf = NumberFormat.getIntegerInstance(); nf.setGroupingUsed(false); listenPortField = new JFormattedTextField(new NumberFormatter(nf)); listenPortField.setColumns(5); listenPortField.setText(String.valueOf(SERVER_DEFAULT_PORT)); c.gridx++; c.weightx = 0.0; 
socketPanel.add(listenPortField, c); serverStartButton = new JButton("Start") { // Button for label toggeling private final String altString = "Stop"; @Override public Dimension getPreferredSize() { String origText = getText(); Dimension origDim = super.getPreferredSize(); setText(altString); Dimension altDim = super.getPreferredSize(); setText(origText); return new Dimension(Math.max(origDim.width, altDim.width), origDim.height); } }; c.gridx++; c.weightx = 0.1; c.anchor = GridBagConstraints.EAST; socketPanel.add(serverStartButton, c); c.gridx = 0; c.gridy++; c.gridwidth = GridBagConstraints.REMAINDER; c.fill = GridBagConstraints.HORIZONTAL; socketPanel.add(new JSeparator(JSeparator.HORIZONTAL), c); add(BorderLayout.NORTH, socketPanel); // --- Incoming / outgoing info JPanel connectionInfoPanel = new JPanel(new GridLayout(0, 2)); connectionInfoPanel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5)); c = new GridBagConstraints(); label = new JLabel("socket -> mote: "); label.setHorizontalAlignment(JLabel.RIGHT); c.gridx = 0; c.gridy = 0; c.anchor = GridBagConstraints.EAST; connectionInfoPanel.add(label); socketToMoteLabel = new JLabel("0 bytes"); c.gridx++; c.anchor = GridBagConstraints.WEST; connectionInfoPanel.add(socketToMoteLabel); label = new JLabel("mote -> socket: "); label.setHorizontalAlignment(JLabel.RIGHT); c.gridx = 0; c.gridy++; c.anchor = GridBagConstraints.EAST; connectionInfoPanel.add(label); moteToSocketLabel = new JLabel("0 bytes"); c.gridx++; c.anchor = GridBagConstraints.WEST; connectionInfoPanel.add(moteToSocketLabel); add(BorderLayout.CENTER, connectionInfoPanel); // --- Status bar JPanel statusBarPanel = new JPanel(new BorderLayout()) { @Override public Dimension getPreferredSize() { Dimension d = super.getPreferredSize(); return new Dimension(STATUSBAR_WIDTH, d.height); } }; statusBarPanel.setLayout(new BoxLayout(statusBarPanel, BoxLayout.LINE_AXIS)); statusBarPanel.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.RAISED)); label 
= new JLabel("Status: "); statusBarPanel.add(label); socketStatusLabel = new JLabel("Idle"); socketStatusLabel.setForeground(Color.DARK_GRAY); statusBarPanel.add(socketStatusLabel); add(BorderLayout.SOUTH, statusBarPanel); serverStartButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { if (e.getActionCommand().equals("Start")) { try { listenPortField.commitEdit(); } catch (ParseException ex) { java.util.logging.Logger.getLogger(SerialSocketClient.class.getName()).log(Level.SEVERE, null, ex); } startServer(((Long) listenPortField.getValue()).intValue()); } else { stopServer(); } } }); pack(); } /* Mote serial port */ serialPort = (SerialPort) mote.getInterfaces().getLog(); if (serialPort == null) { throw new RuntimeException("No mote serial port"); } if (Cooja.isVisualized()) { // gui updates for server status updates addServerListener(new ServerListener() { @Override public void onServerStarted(final int port) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { System.out.println("onServerStarted"); socketStatusLabel.setForeground(COLOR_NEUTRAL); socketStatusLabel.setText("Listening on port " + String.valueOf(port)); listenPortField.setEnabled(false); serverStartButton.setText("Stop"); } }); } @Override public void onClientConnected(final Socket client) { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { socketStatusLabel.setForeground(COLOR_POSITIVE); socketStatusLabel.setText("Client " + client.getInetAddress() + ":" + client.getPort() + " connected."); } }); } @Override public void onClientDisconnected() { SwingUtilities.invokeLater(new Runnable() { @Override public void run() { // XXX check why needed if (serverSocket != null) { socketStatusLabel.setForeground(COLOR_NEUTRAL); socketStatusLabel.setText("Listening on port " + String.valueOf(serverSocket.getLocalPort())); } } }); } @Override public void onServerStopped() { SwingUtilities.invokeLater(new Runnable() { 
          @Override
          public void run() {
            // Server stopped: re-enable port editing and reset the UI to idle.
            listenPortField.setEnabled(true);
            serverStartButton.setText("Start");
            socketStatusLabel.setForeground(COLOR_NEUTRAL);
            socketStatusLabel.setText("Idle");
          }
        });
      }

      @Override
      public void onServerError(final String msg) {
        // Show the error message in the status bar (on the EDT).
        SwingUtilities.invokeLater(new Runnable() {
          @Override
          public void run() {
            socketStatusLabel.setForeground(COLOR_NEGATIVE);
            socketStatusLabel.setText(msg);
          }
        });
      }
    });   // end addServerListener(new ServerListener() { ... })
  }       // end if (Cooja.isVisualized())
}         // end constructor

// Listeners notified of server life-cycle events (registered from the GUI code above).
private List<ServerListener> listeners = new LinkedList<>();

/**
 * Callback interface for server life-cycle events: start/stop of the listen
 * socket, client connect/disconnect, and errors.
 */
public interface ServerListener {
  void onServerStarted(int port);
  void onClientConnected(Socket client);
  void onClientDisconnected();
  void onServerStopped();
  void onServerError(String msg);
}

private void addServerListener(ServerListener listener) {
  listeners.add(listener);
}

/** Notifies all listeners that the server socket is now listening on {@code port}. */
public void notifyServerStarted(int port) {
  for (ServerListener listener : listeners) {
    listener.onServerStarted(port);
  }
}

/** Notifies all listeners that a client connected. */
public void notifyClientConnected(Socket client) {
  for (ServerListener listener : listeners) {
    listener.onClientConnected(client);
  }
}

/** Notifies all listeners that the current client disconnected. */
public void notifyClientDisconnected() {
  for (ServerListener listener : listeners) {
    listener.onClientDisconnected();
  }
}

/** Notifies all listeners that the server socket was closed. */
public void notifyServerStopped() {
  for (ServerListener listener : listeners) {
    listener.onServerStopped();
  }
}

/** Notifies all listeners of a server error described by {@code msg}. */
public void notifyServerError(String msg) {
  for (ServerListener listener : listeners) {
    listener.onServerError(msg);
  }
}

/**
 * Starts the TCP server socket and spawns the accept loop thread.
 *
 * @param port the TCP port to listen on
 */
public void startServer(int port) {
  try {
    serverSocket = new ServerSocket(port);
    logger.info("Listening on port: " + port);
    notifyServerStarted(port);
  } catch (IOException ex) {
    // Bind failed (e.g. port in use): report and give up without spawning the accept thread.
    logger.error(ex.getMessage());
    notifyServerError(ex.getMessage());
    return;
  }
  // Accept loop: runs until the server socket is closed (see stopServer()).
  new Thread() {
    private Thread incomingDataHandler;
    @Override
    public void run() {
      while (!serverSocket.isClosed()) {
        try {
          // wait for next client
          Socket candidateSocket = serverSocket.accept();
          // reject connection if already one client connected
          if (clientSocket != null && !clientSocket.isClosed()) {
            logger.info("Refused connection of client " + candidateSocket.getInetAddress());
            candidateSocket.close();
            continue;
          }
          clientSocket = candidateSocket;
          /* Start handler for data input from socket */
          incomingDataHandler = new Thread(new IncomingDataHandler());
          incomingDataHandler.start();
          /* Observe serial port for outgoing data */
          serialDataObserver = new SerialDataObserver();
          serialPort.addSerialDataObserver(serialDataObserver);
          // Reset byte counters for the new session.
          inBytes = outBytes = 0;
          logger.info("Client connected: " + clientSocket.getInetAddress());
          notifyClientConnected(clientSocket);
        } catch (IOException e) {
          // accept() failed — typically because stopServer() closed the socket.
          logger.info("Listening thread shut down: " + e.getMessage());
          try {
            serverSocket.close();
          } catch (IOException ex) {
            logger.error(ex);
          }
        }
      }
      cleanupClient();
      if (incomingDataHandler != null) {
        // Wait for reader thread to terminate
        // NOTE(review): only joins the most recent handler thread — earlier
        // handlers (from previous clients) terminate on their own EOF.
        try {
          incomingDataHandler.join(500);
        } catch (InterruptedException ex) {
          logger.warn(ex);
        }
      }
      notifyServerStopped();
    }
  }.start();
}

/**
 * Stops the server by closing the server listen socket.
*/ public void stopServer() { try { serverSocket.close(); } catch (IOException ex) { logger.error(ex); } } /* Forward data: virtual port -> mote */ private class IncomingDataHandler implements Runnable { DataInputStream in; @Override public void run() { int numRead = 0; byte[] data = new byte[1024]; try { in = new DataInputStream(clientSocket.getInputStream()); } catch (IOException ex) { logger.error(ex); return; } logger.info("Forwarder: socket -> serial port"); while (numRead >= 0) { final int finalNumRead = numRead; final byte[] finalData = data; /* We are not on the simulation thread */ simulation.invokeSimulationThread(new Runnable() { @Override public void run() { for (int i = 0; i < finalNumRead; i++) { serialPort.writeByte(finalData[i]); } inBytes += finalNumRead; } }); try { numRead = in.read(data); } catch (IOException e) { logger.info(e.getMessage()); numRead = -1; } } logger.info("End of Stream"); cleanupClient(); } } private class SerialDataObserver implements Observer { DataOutputStream out; public SerialDataObserver() { try { out = new DataOutputStream(clientSocket.getOutputStream()); } catch (IOException ex) { logger.error(ex); out = null; } } @Override public void update(Observable obs, Object obj) { try { if (out == null) { /*logger.debug("out is null");*/ return; } out.write(serialPort.getLastSerialData()); out.flush(); outBytes++; } catch (IOException ex) { logger.error(ex); cleanupClient(); } } } @Override public Collection<Element> getConfigXML() { List<Element> config = new ArrayList<>(); Element element; // XXX isVisualized guards? 
element = new Element("port"); if (serverSocket == null || !serverSocket.isBound()) { try { listenPortField.commitEdit(); element.setText(String.valueOf((Long) listenPortField.getValue())); } catch (ParseException ex) { logger.error(ex.getMessage()); listenPortField.setText("null"); } } else { element.setText(String.valueOf(serverSocket.getLocalPort())); } config.add(element); element = new Element("bound"); if (serverSocket == null) { element.setText(String.valueOf(false)); } else { element.setText(String.valueOf(!serverSocket.isClosed())); } config.add(element); return config; } @Override public boolean setConfigXML(Collection<Element> configXML, boolean visAvailable) { Integer port = null; boolean bound = false; for (Element element : configXML) { switch (element.getName()) { case "port": port = Integer.parseInt(element.getText()); break; case "bound": bound = Boolean.parseBoolean(element.getText()); break; default: logger.warn("Unknwon config element: " + element.getName()); break; } } if (Cooja.isVisualized()) { if (port != null) { listenPortField.setText(String.valueOf(port)); } if (bound) { serverStartButton.doClick(); } } else { // if bound and all set up, start client if (port != null) { startServer(port); } else { logger.error("Server not started due to incomplete configuration"); } } return true; } private void cleanupClient() { try { if (clientSocket != null) { clientSocket.close(); clientSocket = null; } } catch (IOException e1) { logger.error(e1.getMessage()); } serialPort.deleteSerialDataObserver(serialDataObserver); notifyClientDisconnected(); } private boolean closed = false; @Override public void closePlugin() { closed = true; cleanupClient(); try { if (serverSocket != null) { serverSocket.close(); } } catch (IOException ex) { logger.error(ex); } } @Override public Mote getMote() { return mote; } private static final int UPDATE_INTERVAL = 150; private Timer updateTimer = new Timer(UPDATE_INTERVAL, new ActionListener() { @Override public void 
  actionPerformed(ActionEvent e) {
    if (closed) {
      // Plugin was closed: stop refreshing permanently.
      updateTimer.stop();
      return;
    }
    if (Cooja.isVisualized()) {
      // Refresh the in/out byte counters in the GUI.
      socketToMoteLabel.setText(inBytes + " bytes");
      moteToSocketLabel.setText(outBytes + " bytes");
    }
  }
});
} // end plugin class
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.api.common.typeutils.base; import org.apache.flink.annotation.Internal; import org.apache.flink.api.common.typeutils.TypeSerializer; import org.apache.flink.api.common.typeutils.TypeSerializerSnapshot; import org.apache.flink.core.memory.DataInputView; import org.apache.flink.core.memory.DataOutputView; import org.apache.flink.util.Preconditions; import java.io.IOException; import java.util.HashMap; import java.util.Map; /** * A serializer for {@link Map}. The serializer relies on a key serializer and a value serializer * for the serialization of the map's key-value pairs. * * <p>The serialization format for the map is as follows: four bytes for the length of the map, * followed by the serialized representation of each key-value pair. To allow null values, each * value is prefixed by a null marker. * * @param <K> The type of the keys in the map. * @param <V> The type of the values in the map. 
*/ @Internal public final class MapSerializer<K, V> extends TypeSerializer<Map<K, V>> { private static final long serialVersionUID = -6885593032367050078L; /** The serializer for the keys in the map */ private final TypeSerializer<K> keySerializer; /** The serializer for the values in the map */ private final TypeSerializer<V> valueSerializer; /** * Creates a map serializer that uses the given serializers to serialize the key-value pairs in * the map. * * @param keySerializer The serializer for the keys in the map * @param valueSerializer The serializer for the values in the map */ public MapSerializer(TypeSerializer<K> keySerializer, TypeSerializer<V> valueSerializer) { this.keySerializer = Preconditions.checkNotNull(keySerializer, "The key serializer cannot be null"); this.valueSerializer = Preconditions.checkNotNull(valueSerializer, "The value serializer cannot be null."); } // ------------------------------------------------------------------------ // MapSerializer specific properties // ------------------------------------------------------------------------ public TypeSerializer<K> getKeySerializer() { return keySerializer; } public TypeSerializer<V> getValueSerializer() { return valueSerializer; } // ------------------------------------------------------------------------ // Type Serializer implementation // ------------------------------------------------------------------------ @Override public boolean isImmutableType() { return false; } @Override public TypeSerializer<Map<K, V>> duplicate() { TypeSerializer<K> duplicateKeySerializer = keySerializer.duplicate(); TypeSerializer<V> duplicateValueSerializer = valueSerializer.duplicate(); return (duplicateKeySerializer == keySerializer) && (duplicateValueSerializer == valueSerializer) ? 
this : new MapSerializer<>(duplicateKeySerializer, duplicateValueSerializer); } @Override public Map<K, V> createInstance() { return new HashMap<>(); } @Override public Map<K, V> copy(Map<K, V> from) { Map<K, V> newMap = new HashMap<>(from.size()); for (Map.Entry<K, V> entry : from.entrySet()) { K newKey = keySerializer.copy(entry.getKey()); V newValue = entry.getValue() == null ? null : valueSerializer.copy(entry.getValue()); newMap.put(newKey, newValue); } return newMap; } @Override public Map<K, V> copy(Map<K, V> from, Map<K, V> reuse) { return copy(from); } @Override public int getLength() { return -1; // var length } @Override public void serialize(Map<K, V> map, DataOutputView target) throws IOException { final int size = map.size(); target.writeInt(size); for (Map.Entry<K, V> entry : map.entrySet()) { keySerializer.serialize(entry.getKey(), target); if (entry.getValue() == null) { target.writeBoolean(true); } else { target.writeBoolean(false); valueSerializer.serialize(entry.getValue(), target); } } } @Override public Map<K, V> deserialize(DataInputView source) throws IOException { final int size = source.readInt(); final Map<K, V> map = new HashMap<>(size); for (int i = 0; i < size; ++i) { K key = keySerializer.deserialize(source); boolean isNull = source.readBoolean(); V value = isNull ? 
null : valueSerializer.deserialize(source); map.put(key, value); } return map; } @Override public Map<K, V> deserialize(Map<K, V> reuse, DataInputView source) throws IOException { return deserialize(source); } @Override public void copy(DataInputView source, DataOutputView target) throws IOException { final int size = source.readInt(); target.writeInt(size); for (int i = 0; i < size; ++i) { keySerializer.copy(source, target); boolean isNull = source.readBoolean(); target.writeBoolean(isNull); if (!isNull) { valueSerializer.copy(source, target); } } } @Override public boolean equals(Object obj) { return obj == this || (obj != null && obj.getClass() == getClass() && keySerializer.equals(((MapSerializer<?, ?>) obj).getKeySerializer()) && valueSerializer.equals( ((MapSerializer<?, ?>) obj).getValueSerializer())); } @Override public int hashCode() { return keySerializer.hashCode() * 31 + valueSerializer.hashCode(); } // -------------------------------------------------------------------------------------------- // Serializer configuration snapshotting // -------------------------------------------------------------------------------------------- @Override public TypeSerializerSnapshot<Map<K, V>> snapshotConfiguration() { return new MapSerializerSnapshot<>(this); } }
/* */ package eu.uqasar.web.pages.admin.companies; /* * #%L * U-QASAR * %% * Copyright (C) 2012 - 2015 U-QASAR Consortium * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Objects; import javax.inject.Inject; import org.apache.wicket.Component; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.form.AjaxFormChoiceComponentUpdatingBehavior; import org.apache.wicket.ajax.markup.html.form.AjaxSubmitLink; import org.apache.wicket.markup.head.CssHeaderItem; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.Check; import org.apache.wicket.markup.html.form.CheckGroup; import org.apache.wicket.markup.html.form.CheckGroupSelector; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.link.BookmarkablePageLink; import org.apache.wicket.markup.html.link.Link; import org.apache.wicket.markup.repeater.Item; import org.apache.wicket.markup.repeater.data.DataView; import org.apache.wicket.model.Model; import org.apache.wicket.model.PropertyModel; import org.apache.wicket.model.StringResourceModel; import org.apache.wicket.request.mapper.parameter.PageParameters; import de.agilecoders.wicket.core.markup.html.bootstrap.behavior.CssClassNameAppender; import 
de.agilecoders.wicket.core.markup.html.bootstrap.button.Buttons;
import de.agilecoders.wicket.core.markup.html.bootstrap.dialog.Modal;
import de.agilecoders.wicket.core.markup.html.bootstrap.navigation.ajax.BootstrapAjaxPagingNavigator;
import eu.uqasar.model.company.Company;
import eu.uqasar.model.user.Team;
import eu.uqasar.model.user.TeamMembership;
import eu.uqasar.model.user.User;
import eu.uqasar.service.company.CompanyService;
import eu.uqasar.service.user.TeamMembershipService;
import eu.uqasar.service.user.TeamService;
import eu.uqasar.service.user.UserService;
import eu.uqasar.web.UQSession;
import eu.uqasar.web.components.ModalActionButton;
import eu.uqasar.web.components.NotificationModal;
import eu.uqasar.web.pages.admin.AdminBasePage;
import eu.uqasar.web.pages.admin.companies.panels.CompanyFilterPanel;
import eu.uqasar.web.pages.admin.companies.panels.CompanyFilterStructure;
import eu.uqasar.web.provider.EntityProvider;

/**
 * Admin page listing all companies with filtering, paging, bulk selection and
 * deletion (deleting a company also deletes its users and their team memberships).
 */
public class CompanyListPage extends AdminBasePage {

    @Inject
    private CompanyService companyService;
    @Inject
    private UserService userservice;
    @Inject
    private TeamService teamService;
    @Inject
    private TeamMembershipService teamMembershipService;

    // Check group holding the companies currently selected for bulk deletion.
    private final CheckGroup<Company> companyGroup;
    // Opens the delete-confirmation modal; disabled while nothing is selected.
    private final AjaxSubmitLink deleteSelectedButton;
    // Paging navigator; hidden when one page suffices (see onConfigure()).
    private final BootstrapAjaxPagingNavigator navigator;
    private final Modal deleteConfirmationModal;
    // Ajax-refreshable container wrapping the whole company table.
    private final WebMarkupContainer companyContainer;
    private CompanyProvider companyProvider;
    // how many items do we show per page
    private static final int itemsPerPage = 10;

    /**
     * Builds the page: filter panel, company table with check boxes, paging
     * navigator, "create company" link and the delete-confirmation modal.
     */
    public CompanyListPage(final PageParameters pageParameters) {
        super(pageParameters);
        Form<Void> form = new Form("form");
        companyContainer = new WebMarkupContainer("companyContainer");
        CompanyFilterPanel filterPanel = new CompanyFilterPanel("filter") {
            @Override
            public void applyClicked(AjaxRequestTarget target, Form<?> form) {
                // An all-empty filter means "no filtering at all".
                if(getFilter().getName() == null &&
                        getFilter().getShortName() == null &&
                        getFilter().getCountry() == null){
                    companyProvider.setFilter(null);
                }else{
                    companyProvider.setFilter(this.getFilter());
                }
                target.add(companyContainer);
            }

            @Override
            public void resetClicked(AjaxRequestTarget target, Form<?> form) {
                // Reset: drop the filter and redraw the table.
                companyProvider.setFilter(null);
                target.add(companyContainer);
            }
        };
        add(filterPanel);
        // Seed the provider from any filter state the panel already carries.
        if(filterPanel.getFilter().getName() == null &&
                filterPanel.getFilter().getShortName() == null &&
                filterPanel.getFilter().getCountry() == null){
            companyProvider = new CompanyProvider();
        } else{
            companyProvider = new CompanyProvider(filterPanel.getFilter());
        }
        form.add(companyGroup = newCheckGroup());
        companyGroup.add(companyContainer.setOutputMarkupId(true));
        CheckGroupSelector checkGroupSelector = new CheckGroupSelector("companyGroupSelector", companyGroup);
        companyContainer.add(checkGroupSelector);
        companyContainer.add(deleteSelectedButton = newDeleteSelectedButton(companyGroup));
        // Table body: one row per company with a delete check box, edit links
        // and the address/contact columns.
        DataView<Company> companiesView = new DataView<Company>("companies", companyProvider, itemsPerPage) {
            @Override
            protected void populateItem(Item<Company> item) {
                final Company company = item.getModelObject();
                Check<Company> check = newDeleteCheck(item);
                item.add(check);
                Link companyEditNameLink = new BookmarkablePageLink("link.name.edit.company",
                        CompanyEditPage.class, new PageParameters().add("id", company.getId()));
                item.add(companyEditNameLink.add(new Label("td.name", new PropertyModel<>(company, "name"))));
                item.add(new Label("td.shortName", new PropertyModel<>(company, "shortName")));
                item.add(new Label("td.street", new PropertyModel<>(company, "street") ));
                item.add(new Label("td.zipcode", new PropertyModel<>(company, "zipcode") ));
                item.add(new Label("td.city", new PropertyModel<>(company, "city") ));
                item.add(new Label("td.country", new PropertyModel<>(company, "country") ));
                item.add(new Label("td.phone", new PropertyModel<>(company, "phone") ));
                item.add(new Label("td.fax", new PropertyModel<>(company, "fax") ));
                Link companyEditLink = new
                        BookmarkablePageLink("link.actions.edit.company", CompanyEditPage.class,
                                new PageParameters().add("id", company.getId()));
                item.add(companyEditLink);
                item.setOutputMarkupId(true);
            }
        };
        // add links for table pagination
        companyContainer.add(navigator = new BootstrapAjaxPagingNavigator(
                "navigatorFoot", companiesView));
        companyContainer.add(companiesView);
        BookmarkablePageLink<CompanyEditPage> createUser = new BookmarkablePageLink<>(
                "link.create.company", CompanyEditPage.class);
        companyContainer.add(createUser);
        add(form);
        // add confirmation modal for deleting products
        add(deleteConfirmationModal = newDeleteConfirmationModal());
    }

    /**
     * Deletes the given companies together with all their users. Each user is
     * first detached from every team he/she belongs to so that no dangling
     * memberships remain, then the users and finally the companies are deleted.
     */
    private void deleteSelectedCompanies(Collection<Company> companies, AjaxRequestTarget target) {
        String message = new StringResourceModel("delete.confirmed", this, null).getString();
        for (Company company : companies) {
            List<User> users = userservice.getByCompany(company);
            for (User user : users) {
                Collection<TeamMembership> members = teamMembershipService.getForUser(user);
                if (!members.isEmpty()) {
                    for (TeamMembership membership : members) {
                        Team team = membership.getTeam();
                        team.getMembers().remove(membership);
                        teamMembershipService.delete(membership);
                        teamService.update(team);
                    }
                }
            }
            userservice.delete(users);
        }
        companyService.delete(companies);
        // Show a success message and refresh the affected components.
        getPage().success(message);
        target.add(feedbackPanel);
        target.add(companyContainer);
        companyGroup.updateModel();
        target.add(deleteSelectedButton);
        setResponsePage(CompanyListPage.class);
    }

    /**
     * Builds the modal asking the user to confirm deletion of the selected companies.
     */
    private NotificationModal newDeleteConfirmationModal() {
        final NotificationModal notificationModal = new NotificationModal(
                "deleteConfirmationModal", new StringResourceModel(
                "delete.confirmation.modal.header", this, null),
                new StringResourceModel("delete.confirmation.modal.message",
                        this, null), false);
        notificationModal.addButton(new ModalActionButton(notificationModal,
                Buttons.Type.Primary, new StringResourceModel(
                "delete.confirmation.modal.submit.text", this, null),
                true) {
            @Override
            protected void
onAfterClick(AjaxRequestTarget target) { // confirmed --> delete deleteSelectedCompanies(companyGroup.getModelObject(), target); // close modal closeDeleteConfirmationModal(notificationModal, target); } }); notificationModal.addButton(new ModalActionButton(notificationModal, Buttons.Type.Default, new StringResourceModel( "delete.confirmation.modal.cancel.text", this, null), true) { @Override protected void onAfterClick(AjaxRequestTarget target) { // Cancel clicked --> do nothing, close modal closeDeleteConfirmationModal(notificationModal, target); } }); return notificationModal; } private void closeDeleteConfirmationModal(final Modal modal, AjaxRequestTarget target) { modal.appendCloseDialogJavaScript(target); } public static PageParameters forCompany(Company company) { if(company.getName() == null) { return new PageParameters(); } else if (company != null) { return forCompany(company.getName()); } else { return new PageParameters(); } } public static PageParameters forCompany(final String name) { return new PageParameters().add("name", name); } @Override protected void onConfigure() { super.onConfigure(); navigator.setVisible(companyProvider.size() > itemsPerPage); } @Override public void renderHead(IHeaderResponse response) { super.renderHead(response); response.render(CssHeaderItem.forUrl("assets/css/admin/user.css")); } private AjaxSubmitLink newDeleteSelectedButton( final CheckGroup<Company> companyGroup) { AjaxSubmitLink submitLink = new AjaxSubmitLink("deleteSelected") { @Override protected void onConfigure() { super.onConfigure(); // only enabled if at least one user is selected if (companyGroup.getModelObject().isEmpty()) { add(new CssClassNameAppender(Model.of("disabled")) { private static final long serialVersionUID = 5588027455196328830L; // remove css class when component is rendered again @Override public boolean isTemporary(Component component) { return true; } }); setEnabled(false); } else { setEnabled(true); } } @Override protected void 
onSubmit(AjaxRequestTarget target, Form<?> form) { deleteConfirmationModal.appendShowDialogJavaScript(target); } }; submitLink.setOutputMarkupId(true); return submitLink; } private CheckGroup newCheckGroup() { CheckGroup<User> checkGroup = new CheckGroup<>("companyGroup", new ArrayList<User>()); checkGroup.add(new AjaxFormChoiceComponentUpdatingBehavior() { @Override protected void onUpdate(AjaxRequestTarget target) { target.add(deleteSelectedButton); } }); return checkGroup; } private Check<Company> newDeleteCheck(final Item<Company> item) { Check<Company> check = new Check<Company>("companyCheck", item.getModel(), companyGroup) { @Override protected void onConfigure() { super.onConfigure(); setVisible(!Objects.equals(item.getModelObject().getId(), UQSession.get().getLoggedInUser().getId())); } }; return check; } private final class CompanyProvider extends EntityProvider<Company> { private static final long serialVersionUID = -1527580045919906872L; private CompanyFilterStructure filter; private String selected; public CompanyProvider() { this.filter = null; }; public CompanyProvider(CompanyFilterStructure filter) { this.filter = filter; } public void setFilter(CompanyFilterStructure filter) { this.filter = filter; } @Override public Iterator<? extends Company> iterator(long first, long count) { if(filter == null){ return companyService.getAllByAscendingName().iterator(); } else { return companyService.getAllByAscendingNameFiltered( filter, Long.valueOf(first).intValue(), Long.valueOf(count).intValue()).iterator(); } } @Override public long size() { if(filter == null){ return companyService.countAll(); } else{ return companyService.countAllFiltered(filter); } } } }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.computeoptimizer.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/compute-optimizer-2019-11-01/UpdateEnrollmentStatus" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class UpdateEnrollmentStatusRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The new enrollment status of the account. * </p> * <p> * The following status options are available: * </p> * <ul> * <li> * <p> * <code>Active</code> - Opts in your account to the Compute Optimizer service. Compute Optimizer begins analyzing * the configuration and utilization metrics of your Amazon Web Services resources after you opt in. For more * information, see <a href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html">Metrics analyzed * by Compute Optimizer</a> in the <i>Compute Optimizer User Guide</i>. * </p> * </li> * <li> * <p> * <code>Inactive</code> - Opts out your account from the Compute Optimizer service. Your account's recommendations * and related metrics data will be deleted from Compute Optimizer after you opt out. 
     * </p>
     * </li>
     * </ul>
     * <note>
     * <p>
     * The <code>Pending</code> and <code>Failed</code> options cannot be used to update the
     * enrollment status of an account; they only appear in responses.
     * </p>
     * </note>
     */
    private String status;

    /**
     * <p>
     * Indicates whether to enroll member accounts of the organization if the account is the
     * management account of an organization.
     * </p>
     */
    private Boolean includeMemberAccounts;

    /**
     * <p>
     * The new enrollment status of the account: {@code Active} or {@code Inactive}.
For more information, see <a * href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html">Metrics analyzed by Compute * Optimizer</a> in the <i>Compute Optimizer User Guide</i>. * </p> * </li> * <li> * <p> * <code>Inactive</code> - Opts out your account from the Compute Optimizer service. Your account's * recommendations and related metrics data will be deleted from Compute Optimizer after you opt out. * </p> * </li> * </ul> * <note> * <p> * The <code>Pending</code> and <code>Failed</code> options cannot be used to update the enrollment status of * an account. They are returned in the response of a request to update the enrollment status of an account. * </p> * @see Status */ public void setStatus(String status) { this.status = status; } /** * <p> * The new enrollment status of the account. * </p> * <p> * The following status options are available: * </p> * <ul> * <li> * <p> * <code>Active</code> - Opts in your account to the Compute Optimizer service. Compute Optimizer begins analyzing * the configuration and utilization metrics of your Amazon Web Services resources after you opt in. For more * information, see <a href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html">Metrics analyzed * by Compute Optimizer</a> in the <i>Compute Optimizer User Guide</i>. * </p> * </li> * <li> * <p> * <code>Inactive</code> - Opts out your account from the Compute Optimizer service. Your account's recommendations * and related metrics data will be deleted from Compute Optimizer after you opt out. * </p> * </li> * </ul> * <note> * <p> * The <code>Pending</code> and <code>Failed</code> options cannot be used to update the enrollment status of an * account. They are returned in the response of a request to update the enrollment status of an account. 
* </p> * </note> * * @return The new enrollment status of the account.</p> * <p> * The following status options are available: * </p> * <ul> * <li> * <p> * <code>Active</code> - Opts in your account to the Compute Optimizer service. Compute Optimizer begins * analyzing the configuration and utilization metrics of your Amazon Web Services resources after you opt * in. For more information, see <a * href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html">Metrics analyzed by Compute * Optimizer</a> in the <i>Compute Optimizer User Guide</i>. * </p> * </li> * <li> * <p> * <code>Inactive</code> - Opts out your account from the Compute Optimizer service. Your account's * recommendations and related metrics data will be deleted from Compute Optimizer after you opt out. * </p> * </li> * </ul> * <note> * <p> * The <code>Pending</code> and <code>Failed</code> options cannot be used to update the enrollment status * of an account. They are returned in the response of a request to update the enrollment status of an * account. * </p> * @see Status */ public String getStatus() { return this.status; } /** * <p> * The new enrollment status of the account. * </p> * <p> * The following status options are available: * </p> * <ul> * <li> * <p> * <code>Active</code> - Opts in your account to the Compute Optimizer service. Compute Optimizer begins analyzing * the configuration and utilization metrics of your Amazon Web Services resources after you opt in. For more * information, see <a href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html">Metrics analyzed * by Compute Optimizer</a> in the <i>Compute Optimizer User Guide</i>. * </p> * </li> * <li> * <p> * <code>Inactive</code> - Opts out your account from the Compute Optimizer service. Your account's recommendations * and related metrics data will be deleted from Compute Optimizer after you opt out. 
* </p> * </li> * </ul> * <note> * <p> * The <code>Pending</code> and <code>Failed</code> options cannot be used to update the enrollment status of an * account. They are returned in the response of a request to update the enrollment status of an account. * </p> * </note> * * @param status * The new enrollment status of the account.</p> * <p> * The following status options are available: * </p> * <ul> * <li> * <p> * <code>Active</code> - Opts in your account to the Compute Optimizer service. Compute Optimizer begins * analyzing the configuration and utilization metrics of your Amazon Web Services resources after you opt * in. For more information, see <a * href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html">Metrics analyzed by Compute * Optimizer</a> in the <i>Compute Optimizer User Guide</i>. * </p> * </li> * <li> * <p> * <code>Inactive</code> - Opts out your account from the Compute Optimizer service. Your account's * recommendations and related metrics data will be deleted from Compute Optimizer after you opt out. * </p> * </li> * </ul> * <note> * <p> * The <code>Pending</code> and <code>Failed</code> options cannot be used to update the enrollment status of * an account. They are returned in the response of a request to update the enrollment status of an account. * </p> * @return Returns a reference to this object so that method calls can be chained together. * @see Status */ public UpdateEnrollmentStatusRequest withStatus(String status) { setStatus(status); return this; } /** * <p> * The new enrollment status of the account. * </p> * <p> * The following status options are available: * </p> * <ul> * <li> * <p> * <code>Active</code> - Opts in your account to the Compute Optimizer service. Compute Optimizer begins analyzing * the configuration and utilization metrics of your Amazon Web Services resources after you opt in. 
For more * information, see <a href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html">Metrics analyzed * by Compute Optimizer</a> in the <i>Compute Optimizer User Guide</i>. * </p> * </li> * <li> * <p> * <code>Inactive</code> - Opts out your account from the Compute Optimizer service. Your account's recommendations * and related metrics data will be deleted from Compute Optimizer after you opt out. * </p> * </li> * </ul> * <note> * <p> * The <code>Pending</code> and <code>Failed</code> options cannot be used to update the enrollment status of an * account. They are returned in the response of a request to update the enrollment status of an account. * </p> * </note> * * @param status * The new enrollment status of the account.</p> * <p> * The following status options are available: * </p> * <ul> * <li> * <p> * <code>Active</code> - Opts in your account to the Compute Optimizer service. Compute Optimizer begins * analyzing the configuration and utilization metrics of your Amazon Web Services resources after you opt * in. For more information, see <a * href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html">Metrics analyzed by Compute * Optimizer</a> in the <i>Compute Optimizer User Guide</i>. * </p> * </li> * <li> * <p> * <code>Inactive</code> - Opts out your account from the Compute Optimizer service. Your account's * recommendations and related metrics data will be deleted from Compute Optimizer after you opt out. * </p> * </li> * </ul> * <note> * <p> * The <code>Pending</code> and <code>Failed</code> options cannot be used to update the enrollment status of * an account. They are returned in the response of a request to update the enrollment status of an account. * </p> * @return Returns a reference to this object so that method calls can be chained together. 
* @see Status */ public UpdateEnrollmentStatusRequest withStatus(Status status) { this.status = status.toString(); return this; } /** * <p> * Indicates whether to enroll member accounts of the organization if the account is the management account of an * organization. * </p> * * @param includeMemberAccounts * Indicates whether to enroll member accounts of the organization if the account is the management account * of an organization. */ public void setIncludeMemberAccounts(Boolean includeMemberAccounts) { this.includeMemberAccounts = includeMemberAccounts; } /** * <p> * Indicates whether to enroll member accounts of the organization if the account is the management account of an * organization. * </p> * * @return Indicates whether to enroll member accounts of the organization if the account is the management account * of an organization. */ public Boolean getIncludeMemberAccounts() { return this.includeMemberAccounts; } /** * <p> * Indicates whether to enroll member accounts of the organization if the account is the management account of an * organization. * </p> * * @param includeMemberAccounts * Indicates whether to enroll member accounts of the organization if the account is the management account * of an organization. * @return Returns a reference to this object so that method calls can be chained together. */ public UpdateEnrollmentStatusRequest withIncludeMemberAccounts(Boolean includeMemberAccounts) { setIncludeMemberAccounts(includeMemberAccounts); return this; } /** * <p> * Indicates whether to enroll member accounts of the organization if the account is the management account of an * organization. * </p> * * @return Indicates whether to enroll member accounts of the organization if the account is the management account * of an organization. */ public Boolean isIncludeMemberAccounts() { return this.includeMemberAccounts; } /** * Returns a string representation of this object. This is useful for testing and debugging. 
Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getStatus() != null) sb.append("Status: ").append(getStatus()).append(","); if (getIncludeMemberAccounts() != null) sb.append("IncludeMemberAccounts: ").append(getIncludeMemberAccounts()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof UpdateEnrollmentStatusRequest == false) return false; UpdateEnrollmentStatusRequest other = (UpdateEnrollmentStatusRequest) obj; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; if (other.getIncludeMemberAccounts() == null ^ this.getIncludeMemberAccounts() == null) return false; if (other.getIncludeMemberAccounts() != null && other.getIncludeMemberAccounts().equals(this.getIncludeMemberAccounts()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode()); hashCode = prime * hashCode + ((getIncludeMemberAccounts() == null) ? 0 : getIncludeMemberAccounts().hashCode()); return hashCode; } @Override public UpdateEnrollmentStatusRequest clone() { return (UpdateEnrollmentStatusRequest) super.clone(); } }
/* * The Dragonite Project * ------------------------- * See the LICENSE file in the root directory for license information. */ package com.vecsight.dragonite.forwarder.network.client; import com.vecsight.dragonite.forwarder.config.ForwarderClientConfig; import com.vecsight.dragonite.forwarder.exception.IncorrectHeaderException; import com.vecsight.dragonite.forwarder.exception.ServerRejectedException; import com.vecsight.dragonite.forwarder.header.ClientInfoHeader; import com.vecsight.dragonite.forwarder.header.ServerResponseHeader; import com.vecsight.dragonite.forwarder.misc.ForwarderGlobalConstants; import com.vecsight.dragonite.forwarder.network.Pipe; import com.vecsight.dragonite.mux.conn.MultiplexedConnection; import com.vecsight.dragonite.mux.conn.Multiplexer; import com.vecsight.dragonite.mux.exception.ConnectionAlreadyExistsException; import com.vecsight.dragonite.mux.exception.MultiplexerClosedException; import com.vecsight.dragonite.sdk.config.DragoniteSocketParameters; import com.vecsight.dragonite.sdk.exception.ConnectionNotAliveException; import com.vecsight.dragonite.sdk.exception.DragoniteException; import com.vecsight.dragonite.sdk.exception.IncorrectSizeException; import com.vecsight.dragonite.sdk.exception.SenderClosedException; import com.vecsight.dragonite.sdk.socket.DragoniteClientSocket; import com.vecsight.dragonite.utils.system.SystemInfo; import com.vecsight.dragonite.utils.type.UnitConverter; import org.pmw.tinylog.Logger; import java.io.IOException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; public class ForwarderClient { private final InetSocketAddress remoteAddress; private final int localPort; private final int downMbps, upMbps; private final DragoniteSocketParameters dragoniteSocketParameters; private volatile boolean doAccept = true; private final ServerSocket serverSocket; private volatile DragoniteClientSocket dragoniteClientSocket; private volatile Multiplexer multiplexer; private 
final Thread acceptThread; private volatile Thread muxReceiveThread; private short nextConnID = 0; //single-threaded internal private final Object connectLock = new Object(); public ForwarderClient(final ForwarderClientConfig config) throws IOException, InterruptedException, DragoniteException, IncorrectHeaderException, ServerRejectedException { this.remoteAddress = config.getRemoteAddress(); this.localPort = config.getLocalPort(); this.downMbps = config.getDownMbps(); this.upMbps = config.getUpMbps(); this.dragoniteSocketParameters = config.getDragoniteSocketParameters(); serverSocket = new ServerSocket(localPort); prepareUnderlyingConnection(dragoniteSocketParameters); acceptThread = new Thread(() -> { Socket socket; try { while (doAccept && (socket = serverSocket.accept()) != null) { handleConnection(socket); } } catch (final IOException e) { Logger.error(e, "Unable to accept TCP connections"); } }, "FC-Accept"); acceptThread.start(); } private void prepareUnderlyingConnection(final DragoniteSocketParameters dragoniteSocketParameters) throws IOException, InterruptedException, DragoniteException, IncorrectHeaderException, ServerRejectedException { dragoniteClientSocket = new DragoniteClientSocket(remoteAddress, UnitConverter.mbpsToSpeed(upMbps), dragoniteSocketParameters); dragoniteClientSocket.setDescription("Forwarder"); try { dragoniteClientSocket.send(new ClientInfoHeader(downMbps, upMbps, SystemInfo.getUsername(), ForwarderGlobalConstants.APP_VERSION, SystemInfo.getOS()).toBytes()); final byte[] response = dragoniteClientSocket.read(); final ServerResponseHeader responseHeader = new ServerResponseHeader(response); if (responseHeader.getStatus() != 0) { Logger.error("The server has rejected this connection (Error code {}): {}", responseHeader.getStatus(), responseHeader.getMsg()); throw new ServerRejectedException(responseHeader.getMsg()); } else if (responseHeader.getMsg().length() > 0) { Logger.info("Server welcome message: {}", responseHeader.getMsg()); } 
} catch (InterruptedException | IOException | DragoniteException | IncorrectHeaderException | ServerRejectedException e) { Logger.error(e, "Unable to connect to remote server"); try { dragoniteClientSocket.closeGracefully(); } catch (InterruptedException | SenderClosedException | IOException ignored) { } throw e; } multiplexer = new Multiplexer(bytes -> { try { dragoniteClientSocket.send(bytes); } catch (InterruptedException | IncorrectSizeException | IOException | SenderClosedException e) { Logger.error(e, "Multiplexer is unable to send data"); } }, ForwarderGlobalConstants.MAX_FRAME_SIZE); if (muxReceiveThread != null) muxReceiveThread.interrupt(); muxReceiveThread = new Thread(() -> { byte[] buf; try { while ((buf = dragoniteClientSocket.read()) != null) { multiplexer.onReceiveBytes(buf); } } catch (InterruptedException | ConnectionNotAliveException e) { Logger.error(e, "Cannot receive data from underlying socket"); } finally { synchronized (connectLock) { try { dragoniteClientSocket.closeGracefully(); } catch (final Exception ignored) { } multiplexer.close(); } } }, "FC-MuxReceive"); muxReceiveThread.start(); Logger.info("Connection established with {}", remoteAddress.toString()); } private void handleConnection(final Socket socket) { Logger.debug("New connection from {}", socket.getRemoteSocketAddress().toString()); synchronized (connectLock) { if (!dragoniteClientSocket.isAlive()) { multiplexer.close(); Logger.warn("The underlying connection is no longer alive, reconnecting"); try { prepareUnderlyingConnection(dragoniteSocketParameters); } catch (IOException | InterruptedException | DragoniteException | IncorrectHeaderException | ServerRejectedException e) { Logger.error(e, "Unable to reconnect, there may be a network error or the server has been shut down"); try { socket.close(); } catch (final IOException ignored) { } return; } } } try { final MultiplexedConnection multiplexedConnection = multiplexer.createConnection(nextConnID++); final Thread 
pipeFromRemoteThread = new Thread(() -> { final Pipe pipeFromRemotePipe = new Pipe(ForwarderGlobalConstants.PIPE_BUFFER_SIZE); try { pipeFromRemotePipe.pipe(multiplexedConnection, socket.getOutputStream()); } catch (final Exception e) { Logger.debug(e, "Pipe closed"); } finally { try { socket.close(); } catch (final IOException ignored) { } multiplexedConnection.close(); } }, "FC-R2L"); pipeFromRemoteThread.start(); final Thread pipeFromLocalThread = new Thread(() -> { final Pipe pipeFromLocalPipe = new Pipe(ForwarderGlobalConstants.PIPE_BUFFER_SIZE); try { pipeFromLocalPipe.pipe(socket.getInputStream(), multiplexedConnection); } catch (final Exception e) { Logger.debug(e, "Pipe closed"); } finally { try { socket.close(); } catch (final IOException ignored) { } multiplexedConnection.close(); } }, "FC-L2R"); pipeFromLocalThread.start(); } catch (ConnectionAlreadyExistsException | MultiplexerClosedException e) { Logger.error(e, "Cannot create multiplexed connection"); try { socket.close(); } catch (final IOException ignored) { } } } public boolean isDoAccept() { return doAccept; } public void stopAccept() { acceptThread.interrupt(); doAccept = false; } }
/* * Copyright 2014 Guidewire Software, Inc. */ package gw.internal.gosu.parser.statements; import gw.internal.gosu.parser.CannotExecuteGosuException; import gw.internal.gosu.parser.Expression; import gw.internal.gosu.parser.Statement; import gw.internal.gosu.parser.Symbol; import gw.lang.parser.IExpression; import gw.lang.parser.IStackProvider; import gw.lang.parser.ISymbolTable; import gw.lang.parser.statements.IAssertStatement; import gw.lang.parser.statements.IForEachStatement; import gw.lang.parser.statements.ILoopStatement; import gw.lang.parser.statements.IReturnStatement; import gw.lang.parser.statements.ITerminalStatement; import gw.lang.parser.statements.IThrowStatement; import gw.util.GosuObjectUtil; /** * Represents a foreach statement as specified in the Gosu grammar: * <pre> * <i>for...in-statement</i> * <b>for</b> <b>(</b> &lt;identifier&gt; <b>in</b> &lt;expression&gt; [ <b>index</b> &lt;identifier&gt; ] <b>)</b> &lt;statement&gt; * </pre> * <p/> * * @see gw.lang.parser.IGosuParser */ public final class ForEachStatement extends LoopStatement implements IForEachStatement { protected Symbol _identifier; protected Expression _expression; protected Symbol _indexIdentifier; private Symbol _iterIdentifier; protected Statement _statement; protected IStackProvider _stackProvider; private int _iIdentifierOffset; private int _iIndexIdentifierOffset; private int _iIterOffset; private boolean _bStructuralIterable; /** * Constructs a ForEachStatement given an ISymbolTable instance. */ public ForEachStatement( ISymbolTable stackProvider ) { _stackProvider = stackProvider; } /** * @return The name of the Indentifier in the statement. */ public Symbol getIdentifier() { return _identifier; } /** * @param identifier */ public void setIdentifier( Symbol identifier ) { _identifier = identifier; } /** * @return The name of the Index Identifier, or null of not specified. 
*/ public Symbol getIndexIdentifier() { return _indexIdentifier; } /** * @param indexIdentifier */ public void setIndexIdentifier( Symbol indexIdentifier ) { _indexIdentifier = indexIdentifier; } /** * @return The name of the Index Identifier, or null of not specified. */ public Symbol getIteratorIdentifier() { return _iterIdentifier; } public void setIteratorIdentifier( Symbol iterIdentifier ) { _iterIdentifier = iterIdentifier; } @Override public IExpression getExpression() { return getInExpression(); } /** * @return The In Expression. */ public Expression getInExpression() { return _expression; } /** * @param expression The In Expression. */ public void setInExpression( Expression expression ) { _expression = expression; } /** * @return The statement to execute in the interation. */ public Statement getStatement() { return _statement; } /** * @param statement The statement to execute in the interation. */ public void setStatement( Statement statement ) { _statement = statement; if( _statement instanceof StatementList ) { // Use this for-stmt's scope. This is purely a performance feature and not // without a drawback -- in the debugger this for-stmt's scope may have // residual symbols from the execution of its statement-list. This is // really only an issue in the debugger where one may see symbols that are // left over from the previous step in the loop. 
((StatementList)_statement).setNoScope(); } } public Object execute() { if( !isCompileTimeConstant() ) { return super.execute(); } throw new CannotExecuteGosuException(); } @Override protected ITerminalStatement getLeastSignificantTerminalStatement_internal( boolean[] bAbsolute ) { if( _statement != null ) { ITerminalStatement terminalStmt = _statement.getLeastSignificantTerminalStatement( bAbsolute ); if( terminalStmt instanceof IReturnStatement || terminalStmt instanceof IAssertStatement || terminalStmt instanceof IThrowStatement || terminalStmt instanceof ILoopStatement ) { bAbsolute[0] = false; return terminalStmt; } } return null; } @Override public String toString() { String strIndex = _indexIdentifier == null ? null : _indexIdentifier.getName(); if( strIndex != null ) { strIndex = " index " + strIndex; } else { strIndex = ""; } return "for( " + (getIdentifier() == null ? "" : getIdentifier().getName()) + " in " + toString(getInExpression()) + strIndex + ")\n" + toString(getStatement()); } private String toString(Object o) { return o == null ? 
"" : o.toString(); } @Override public int getNameOffset( String identifierName ) { if (identifierName.toString().equals(_identifier.getName())) { return _iIdentifierOffset; } else if (identifierName.toString().equals(_indexIdentifier.getName())) { return _iIndexIdentifierOffset; } else if (identifierName.toString().equals(_iterIdentifier.getName())) { return _iIterOffset; } else { throw new RuntimeException("Wrong name " + identifierName); } } @Override public void setNameOffset( int iOffset, String identifierName ) { _iIdentifierOffset = iOffset; } public void setIndexNameOffset( int iOffset ) { _iIndexIdentifierOffset = iOffset; } public void setIterNameOffset( int iOffset ) { _iIterOffset = iOffset; } public boolean declares( String identifierName ) { return ((getIdentifier() != null) && GosuObjectUtil.equals( getIdentifier().getName(), identifierName )) || ((getIndexIdentifier() != null) && GosuObjectUtil.equals( getIndexIdentifier().getName(), identifierName )) || ((getIteratorIdentifier() != null) && GosuObjectUtil.equals( getIteratorIdentifier().getName(), identifierName )); } public String[] getDeclarations() { if (getIndexIdentifier() != null) { return new String[] {getIdentifier().getName(), getIndexIdentifier().getName()}; } else if (getIdentifier() != null) { return new String[] {getIdentifier().getName()}; } else if (getIteratorIdentifier() != null) { return new String[] {getIteratorIdentifier().getName()}; } else { return new String[1]; } } public boolean isStructuralIterable() { return _bStructuralIterable; } public void setStructuralIterable( boolean bStructuralIterable ) { _bStructuralIterable = bStructuralIterable; } }
/* * Copyright (c) 2008, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wso2.carbon.registry.extensions.handlers.utils; import org.apache.axiom.om.OMAbstractFactory; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMFactory; import org.apache.axiom.om.OMNamespace; import org.apache.axiom.om.impl.builder.StAXOMBuilder; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.axiom.om.xpath.AXIOMXPath; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.registry.core.Association; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.ResourceImpl; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext; import org.wso2.carbon.registry.core.session.CurrentSession; import org.wso2.carbon.registry.core.utils.RegistryUtils; import org.wso2.carbon.registry.extensions.services.Utils; import org.wso2.carbon.registry.extensions.utils.CommonConstants; import org.wso2.carbon.registry.extensions.utils.CommonUtil; import javax.xml.namespace.QName; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import java.io.StringReader; 
import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.UUID; public class EndpointUtils { private static final Log log = LogFactory.getLog(EndpointUtils.class); private static final String SOAP11_ENDPOINT_EXPR = "/wsdl:definitions/wsdl:service/wsdl:port/soap:address"; private static final String SOAP12_ENDPOINT_EXPR = "/wsdl:definitions/wsdl:service/wsdl:port/soap12:address"; private static final String HTTP_ENDPOINT_EXPR = "/wsdl:definitions/wsdl:service/wsdl:port/http:address"; private static final String SERVICE_ENDPOINT_ENTRY_EXPR = "/s:metadata/s:endpoints/s:entry"; private static final String SERVICE_ENDPOINT_EXPR = "/s:metadata/s:endpoints"; private static final String SERVICE_ENDPOINTS_ELEMENT = "endpoints"; private static final String SERVICE_ENDPOINTS_ENTRY_ELEMENT = "entry"; private static final String LOCATION_ATTR = "location"; private static final String SYNAPSE_ENDPOINT = "endpoint"; private static final String SYNAPSE_ENDPOINT_NAME_ATTRIBUTE = "name"; private static final String SYNAPSE_ENDPOINT_ADDRESS = "address"; private static final String SYNAPSE_ENDPOINT_ADDRESS_URI_ATTRIBUTE = "uri"; private static final String SYNAPSE_ENDPOINT_OVERVIEW = "overview"; private static final String SYNAPSE_ENDPOINT_VERSION = "version"; private static final String SYNAPSE_ENDPOINT_NAME = "name"; private static final String ENDPOINT_RESOURCE_PREFIX = "ep-"; private static final String ENDPOINT_NAMESPACE_ATTRIBUTE = "xmlns"; private static final String ENDPOINT_ELEMENT_NAMESPACE = "http://www.wso2.org/governance/metadata"; private static String endpointVersion = CommonConstants.ENDPOINT_VERSION_DEFAULT_VALUE; private static final String ENDPOINT_DEFAULT_LOCATION = "/trunk/endpoints/"; private static String endpointLocation = ENDPOINT_DEFAULT_LOCATION; private static String endpointMediaType = CommonConstants.ENDPOINT_MEDIA_TYPE; private static boolean includeNamespaceInName = 
false; public static boolean isIncludeNamespaceInName() { return includeNamespaceInName; } public static void setIncludeNamespaceInName(boolean includeNamespaceInName) { EndpointUtils.includeNamespaceInName = includeNamespaceInName; } public static void setEndpointLocation(String endpointLocation) { EndpointUtils.endpointLocation = endpointLocation; } public static String getEndpointLocation() { return endpointLocation; } public static void setEndpointMediaType(String mediaType) { endpointMediaType = mediaType; } public static String getEndpointMediaType() { return endpointMediaType; } public static void removeEndpointEntry(String oldWSDL, String servicePath, OMElement serviceElement, Registry registry) throws RegistryException { List<OMElement> serviceEndpointEntryElements = getOmElements(serviceElement); if (serviceEndpointEntryElements == null || serviceEndpointEntryElements.size() == 0) { return; } String endpointURL = null; if (registry.resourceExists(oldWSDL)){ Association[] associations = registry.getAssociations(oldWSDL, CommonConstants.DEPENDS); for (Association association: associations) { String targetPath = association.getDestinationPath(); if (registry.resourceExists(targetPath)) { Resource targetResource = registry.get(targetPath); if (CommonConstants.ENDPOINT_MEDIA_TYPE.equals(targetResource.getMediaType())) { byte[] sourceContent = (byte[]) targetResource.getContent(); if (sourceContent == null) { continue; } endpointURL = EndpointUtils.deriveEndpointFromContent(RegistryUtils.decodeBytes(sourceContent)); } } } } for(OMElement endpointOmElement : serviceEndpointEntryElements){ if(endpointOmElement!=null){ String entryText = endpointOmElement.getText(); if (endpointURL != null && entryText.contains(endpointURL) ){ endpointOmElement.detach(); } } } } private static List<OMElement> getOmElements(OMElement serviceElement) throws RegistryException { List<OMElement> serviceEndpointEntryElements; try { serviceEndpointEntryElements = 
evaluateXPathToElements(SERVICE_ENDPOINT_ENTRY_EXPR, serviceElement); } catch (Exception e) { String msg = "Error in evaluating xpath expressions to extract endpoints"; log.error(msg, e); throw new RegistryException(msg, e); } return serviceEndpointEntryElements; } public static void removeEndpointEntry(RequestContext requestContext, OMElement serviceElement ,String servicePath, Registry registry) throws RegistryException { Association[] associations = registry.getAssociations(servicePath, CommonConstants.DEPENDS); for (Association association: associations) { String targetPath = association.getDestinationPath(); if (registry.resourceExists(targetPath)) { Resource targetResource = registry.get(targetPath); if (CommonConstants.ENDPOINT_MEDIA_TYPE.equals(targetResource.getMediaType())) { registry.removeAssociation(servicePath, targetResource.getPath(), CommonConstants.DEPENDS); registry.removeAssociation(targetResource.getPath(), servicePath, CommonConstants.USED_BY); } } } List<OMElement> serviceEndpointEntryElements = getOmElements(serviceElement); String serviceVersion = CommonUtil.getServiceVersion(serviceElement); endpointVersion = serviceVersion; for (OMElement endpointElement: serviceEndpointEntryElements) { Map<String, String> properties = new HashMap<String, String>(); String entryText = endpointElement.getText(); String entryKey = null; String entryVal; int colonIndex = entryText.indexOf(":"); if (colonIndex < entryText.length()- 1) { entryKey = entryText.substring(0, colonIndex); entryText = entryText.substring(colonIndex + 1); } entryVal = entryText; if (!"".equals(entryKey)) { // here the key is the environment String endpointPath = RegistryUtils.getAbsolutePath(registry.getRegistryContext(), org.wso2.carbon.registry.core.RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH + endpointLocation) + deriveEndpointFromUrl(entryVal); String existingEnv = null; if (registry.resourceExists(endpointPath)) { 
registry.get(endpointPath).removeProperty(CommonConstants.ENDPOINT_ENVIRONMENT_ATTR); } existingEnv = entryKey; properties.put(CommonConstants.ENDPOINT_ENVIRONMENT_ATTR, existingEnv); } // the entry value is the url saveEndpoint(requestContext, registry, entryVal, servicePath, properties, CommonUtil.getUnchrootedSystemRegistry(requestContext)); } } public static void saveEndpointsFromWSDL(RequestContext context, String wsdlPath, Resource wsdlResource, Registry registry, Registry systemRegistry) throws RegistryException { // building the wsdl element. byte[] wsdlContentBytes = (byte[])wsdlResource.getContent(); if (wsdlContentBytes == null) { return; } OMElement wsdlElement; try { wsdlElement = buildOMElement(RegistryUtils.decodeBytes(wsdlContentBytes)); } catch (Exception e) { String msg = "Error in building the wsdl element for path: " + wsdlPath + "."; throw new RegistryException(msg, e); } // If the version field is not blank endpointVersion is modified accordingly if (StringUtils.isNotBlank(wsdlResource.getProperty("version"))) { endpointVersion = wsdlResource.getProperty("version"); } // saving soap11 endpoints List<OMElement> soap11Elements; try { soap11Elements = evaluateXPathToElements(SOAP11_ENDPOINT_EXPR, wsdlElement); } catch (Exception e) { String msg = "Error in evaluating xpath expressions to extract endpoints, wsdl path: " + wsdlPath + "."; log.error(msg, e); throw new RegistryException(msg, e); } for (OMElement soap11Element: soap11Elements) { String locationUrl = soap11Element.getAttributeValue(new QName(LOCATION_ATTR)); Map<String, String> properties = new HashMap<String, String>(); properties.put(CommonConstants.SOAP11_ENDPOINT_ATTRIBUTE, "true"); properties.put(CommonConstants.SOURCE_PROPERTY, CommonConstants.SOURCE_AUTO); saveEndpoint(context,registry, locationUrl, wsdlPath, properties, systemRegistry); } // saving soap12 endpoints List<OMElement> soap12Elements; try { soap12Elements = evaluateXPathToElements(SOAP12_ENDPOINT_EXPR, wsdlElement); 
        } catch (Exception e) {
            String msg = "Error in evaluating xpath expressions to extract endpoints, wsdl path: "
                    + wsdlPath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        // Register every SOAP 1.2 endpoint declared in the WSDL as an endpoint resource.
        for (OMElement soap12Element: soap12Elements) {
            String locationUrl = soap12Element.getAttributeValue(new QName(LOCATION_ATTR));
            Map<String, String> properties = new HashMap<String, String>();
            properties.put(CommonConstants.SOAP12_ENDPOINT_ATTRIBUTE, "true");
            properties.put(CommonConstants.SOURCE_PROPERTY, CommonConstants.SOURCE_AUTO);
            saveEndpoint(context, registry, locationUrl, wsdlPath, properties, systemRegistry);
        }
        // saving http endpoints
        List<OMElement> httpElements;
        try {
            httpElements = evaluateXPathToElements(HTTP_ENDPOINT_EXPR, wsdlElement);
        } catch (Exception e) {
            String msg = "Error in evaluating xpath expressions to extract endpoints, wsdl path: "
                    + wsdlPath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        for (OMElement httpElement: httpElements) {
            String locationUrl = httpElement.getAttributeValue(new QName(LOCATION_ATTR));
            Map<String, String> properties = new HashMap<String, String>();
            properties.put(CommonConstants.HTTP_ENDPOINT_ATTRIBUTE, "true");
            properties.put(CommonConstants.SOURCE_PROPERTY, CommonConstants.SOURCE_AUTO);
            saveEndpoint(context, registry, locationUrl, wsdlPath, properties, systemRegistry);
        }
    }

    /**
     * Extracts all SOAP 1.1, SOAP 1.2 and HTTP endpoints from the given WSDL resource and
     * saves each one as an endpoint resource in the registry, tagged with the given
     * environment and version. Existing dependency paths that already match the derived
     * endpoint are reused by the 9-argument saveEndpoint overload called below.
     *
     * @param context        the handler request context
     * @param wsdlPath       registry path of the WSDL resource (used in error messages and associations)
     * @param wsdlResource   the WSDL resource whose byte content is parsed
     * @param registry       the user registry in which endpoints are stored
     * @param systemRegistry system registry used for creating base collections
     * @param environment    base environment path for the endpoints
     * @param dependencies   paths of existing dependencies to match against before creating new endpoints
     * @param version        version to embed in the endpoint path
     * @throws RegistryException if the WSDL cannot be parsed or the XPath evaluation fails
     */
    public static void saveEndpointsFromWSDL(RequestContext context, String wsdlPath, Resource wsdlResource,
                                             Registry registry, Registry systemRegistry,String environment ,List<String> dependencies,String version)
            throws RegistryException {
        // building the wsdl element.
        byte[] wsdlContentBytes = (byte[])wsdlResource.getContent();
        if (wsdlContentBytes == null) {
            // Nothing to extract when the WSDL resource carries no content.
            return;
        }
        OMElement wsdlElement;
        try {
            wsdlElement = buildOMElement(RegistryUtils.decodeBytes(wsdlContentBytes));
        } catch (Exception e) {
            String msg = "Error in building the wsdl element for path: " + wsdlPath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        // saving soap11 endpoints
        List<OMElement> soap11Elements;
        try {
            soap11Elements = evaluateXPathToElements(SOAP11_ENDPOINT_EXPR, wsdlElement);
        } catch (Exception e) {
            String msg = "Error in evaluating xpath expressions to extract endpoints, wsdl path: "
                    + wsdlPath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        for (OMElement soap11Element: soap11Elements) {
            String locationUrl = soap11Element.getAttributeValue(new QName(LOCATION_ATTR));
            Map<String, String> properties = new HashMap<String, String>();
            properties.put(CommonConstants.SOAP11_ENDPOINT_ATTRIBUTE, "true");
            properties.put(CommonConstants.SOURCE_PROPERTY, CommonConstants.SOURCE_AUTO);
            saveEndpoint(context,registry, locationUrl, wsdlPath, properties, systemRegistry,environment,dependencies,version);
        }
        // saving soap12 endpoints
        List<OMElement> soap12Elements;
        try {
            soap12Elements = evaluateXPathToElements(SOAP12_ENDPOINT_EXPR, wsdlElement);
        } catch (Exception e) {
            String msg = "Error in evaluating xpath expressions to extract endpoints, wsdl path: "
                    + wsdlPath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        for (OMElement soap12Element: soap12Elements) {
            String locationUrl = soap12Element.getAttributeValue(new QName(LOCATION_ATTR));
            Map<String, String> properties = new HashMap<String, String>();
            properties.put(CommonConstants.SOAP12_ENDPOINT_ATTRIBUTE, "true");
            properties.put(CommonConstants.SOURCE_PROPERTY, CommonConstants.SOURCE_AUTO);
            saveEndpoint(context, registry, locationUrl, wsdlPath, properties, systemRegistry,environment,dependencies,version);
        }
        // saving http endpoints
        List<OMElement> httpElements;
        try {
            httpElements = evaluateXPathToElements(HTTP_ENDPOINT_EXPR, wsdlElement);
        } catch (Exception e) {
            String msg = "Error in evaluating xpath expressions to extract endpoints, wsdl path: "
                    + wsdlPath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        for (OMElement httpElement: httpElements) {
            String locationUrl = httpElement.getAttributeValue(new QName(LOCATION_ATTR));
            Map<String, String> properties = new HashMap<String, String>();
            properties.put(CommonConstants.HTTP_ENDPOINT_ATTRIBUTE, "true");
            properties.put(CommonConstants.SOURCE_PROPERTY, CommonConstants.SOURCE_AUTO);
            saveEndpoint(context, registry, locationUrl, wsdlPath, properties, systemRegistry,environment,dependencies,version);
        }
    }

    /**
     * Reads the endpoint entries embedded in a service artifact element and saves each
     * entry as an endpoint resource, refreshing the service's endpoint associations.
     * Also records the service version in the class-level endpointVersion field, which
     * later endpoint-content generation reads.
     *
     * @param context        the handler request context
     * @param servicePath    registry path of the service artifact
     * @param serviceElement parsed service artifact content
     * @param registry       the user registry in which endpoints are stored
     * @param systemRegistry system registry used for creating base collections
     * @throws RegistryException on any registry failure while saving endpoints or associations
     */
    public static void saveEndpointsFromServices(RequestContext context, String servicePath, OMElement serviceElement,
                                                 Registry registry, Registry systemRegistry) throws RegistryException {
        // first iterate through soap11 endpoints
        // saving soap11 endpoints
        List<OMElement> serviceEndpointEntryElements = getOmElements(serviceElement);
        String serviceVersion = CommonUtil.getServiceVersion(serviceElement);
        // NOTE(review): endpointVersion appears to be static shared state — concurrent
        // handler invocations could interleave here; confirm single-threaded usage.
        endpointVersion = serviceVersion;
        // and add the associations and before adding them first remove all the endpoint dependencies
        removeEndpointDependencies(servicePath, registry);
        // iterate through the new endpoints..
        // Each entry is of the form "<environment>:<url>"; split on the first colon.
        for (OMElement endpointElement: serviceEndpointEntryElements) {
            Map<String, String> properties = new HashMap<String, String>();
            String entryText = endpointElement.getText();
            String entryKey = null;
            String entryVal;
            int colonIndex = entryText.indexOf(":");
            // NOTE(review): when no colon is present indexOf returns -1, which still
            // satisfies this condition and makes substring(0, -1) throw — confirm
            // entries are always "key:value" formatted.
            if (colonIndex < entryText.length()- 1) {
                entryKey = entryText.substring(0, colonIndex);
                entryText = entryText.substring(colonIndex + 1);
            }
            entryVal = entryText;
            if (!"".equals(entryKey)) {
                // here the key is the environment
                String endpointPath = RegistryUtils.getAbsolutePath(registry.getRegistryContext(),
                        org.wso2.carbon.registry.core.RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH
                                + endpointLocation) + deriveEndpointFromUrl(entryVal);
                String existingEnv = null;
                if (registry.resourceExists(endpointPath)) {
                    // Drop the stale environment property before re-tagging the endpoint.
                    registry.get(endpointPath).removeProperty(CommonConstants.ENDPOINT_ENVIRONMENT_ATTR);
                }
                existingEnv = entryKey;
                properties.put(CommonConstants.ENDPOINT_ENVIRONMENT_ATTR, existingEnv);
            }
            // the entry value is the url
            saveEndpoint(context, registry, entryVal, servicePath, properties, systemRegistry);
        }
        // and we are getting the endpoints of all the attached wsdls.
        addAssociations(servicePath, registry);
    }

    /**
     * Environment-aware variant of saveEndpointsFromServices. Runs only when the
     * adding-association lock is free; acquires it for the duration of the endpoint
     * save so association bookkeeping is not re-entered.
     *
     * @param context        the handler request context
     * @param servicePath    registry path of the service artifact
     * @param serviceElement parsed service artifact content
     * @param registry       the user registry in which endpoints are stored
     * @param systemRegistry system registry used for creating base collections
     * @param environment    base environment path used to derive endpoint paths
     * @throws RegistryException on any registry failure while saving endpoints or associations
     */
    public static void saveEndpointsFromServices(RequestContext context,String servicePath, OMElement serviceElement,
                                                 Registry registry, Registry systemRegistry,String environment) throws RegistryException {
        if (!CommonUtil.isAddingAssociationLockAvailable()) {
            // Another association update is in flight; skip silently (best effort).
            return;
        }
        CommonUtil.acquireAddingAssociationLock();
        try {
            // first iterate through soap11 endpoints
            // saving soap11 endpoints
            List<OMElement> serviceEndpointEntryElements = getOmElements(serviceElement);
            // and add the associations and before adding them first remove all the endpoint dependencies
            removeEndpointDependencies(servicePath, registry);
            // iterate through the new endpoints..
            for (OMElement endpointElement: serviceEndpointEntryElements) {
                Map<String, String> properties = new HashMap<String, String>();
                String entryText = endpointElement.getText();
                String entryKey = null;
                String entryVal;
                int colonIndex = entryText.indexOf(":");
                // NOTE(review): same -1 indexOf caveat as the overload above — confirm
                // entries always contain a colon.
                if (colonIndex < entryText.length()- 1) {
                    entryKey = entryText.substring(0, colonIndex);
                    entryText = entryText.substring(colonIndex + 1);
                }
                entryVal = entryText;
                if (!"".equals(entryKey)) {
                    // here the key is the environment
                    String endpointPath = RegistryUtils.getAbsolutePath(registry.getRegistryContext(),
                            org.wso2.carbon.registry.core.RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH
                                    + environment) + deriveEndpointFromUrl(entryVal);
                    String existingEnv = null;
                    if (registry.resourceExists(endpointPath)) {
                        registry.get(endpointPath).removeProperty(CommonConstants.ENDPOINT_ENVIRONMENT_ATTR);
                    }
                    existingEnv = entryKey;
                    properties.put(CommonConstants.ENDPOINT_ENVIRONMENT_ATTR, existingEnv);
                }
                // the entry value is the url
                saveEndpoint(context,registry, entryVal, servicePath, properties, systemRegistry,environment);
            }
        } finally {
            CommonUtil.releaseAddingAssociationLock();
        }
        // and we are getting the endpoints of all the attached wsdls.
        addAssociations(servicePath, registry);
    }

    /**
     * Re-links a service to the endpoints of every WSDL it depends on: for each WSDL
     * dependency of the service, each endpoint dependency of that WSDL is associated
     * to the service (DEPENDS one way, USED_BY the other).
     *
     * @param servicePath registry path of the service
     * @param registry    registry holding the associations
     * @throws RegistryException on any registry failure
     */
    private static void addAssociations(String servicePath, Registry registry) throws RegistryException {
        Association[] associations = registry.getAssociations(servicePath, CommonConstants.DEPENDS);
        for (Association association: associations) {
            String targetPath = association.getDestinationPath();
            if (registry.resourceExists(targetPath)) {
                Resource targetResource = registry.get(targetPath);
                if (CommonConstants.WSDL_MEDIA_TYPE.equals(targetResource.getMediaType())) {
                    // for the wsdl, we are getting all the endpoints
                    Association[] wsdlAssociations =
                            registry.getAssociations(targetPath, CommonConstants.DEPENDS);
                    for (Association wsdlAssociation: wsdlAssociations) {
                        String wsdlTargetPath = wsdlAssociation.getDestinationPath();
                        if (registry.resourceExists(wsdlTargetPath)) {
                            Resource wsdlTargetResource = registry.get(wsdlTargetPath);
                            if (CommonConstants.ENDPOINT_MEDIA_TYPE.equals(
                                    wsdlTargetResource.getMediaType())) {
                                // so it is the wsdl associated to endpoints,
                                // so we associate these endpoints to the services as well.
                                registry.addAssociation(servicePath, wsdlTargetPath,
                                        CommonConstants.DEPENDS);
                                registry.addAssociation(wsdlTargetPath, servicePath,
                                        CommonConstants.USED_BY);
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Removes every DEPENDS/USED_BY association between the service and endpoint-typed
     * resources, so the endpoint links can be rebuilt from the current artifact content.
     *
     * @param servicePath registry path of the service
     * @param registry    registry holding the associations
     * @throws RegistryException on any registry failure
     */
    private static void removeEndpointDependencies(String servicePath, Registry registry)
            throws RegistryException {
        // update lock check removed from for loop to prevent the database lock
        Association[] associations = registry.getAllAssociations(servicePath);
        for (Association association : associations) {
            String path = association.getDestinationPath();
            if (registry.resourceExists(path)) {
                Resource endpointResource = registry.get(path);
                if (CommonConstants.ENDPOINT_MEDIA_TYPE.equals(endpointResource.getMediaType())) {
                    registry.removeAssociation(servicePath, path, CommonConstants.DEPENDS);
                    registry.removeAssociation(path, servicePath, CommonConstants.USED_BY);
                }
            }
        }
    }

    // Flat prefix/namespace-URI pairs consumed pairwise by evaluateXPathToElements.
    private static String[] wsdlPrefixes = {
            "wsdl", "http://schemas.xmlsoap.org/wsdl/",
            "wsdl2", "http://www.w3.org/ns/wsdl",
            "xsd", "http://www.w3.org/2001/XMLSchema",
            "soap", "http://schemas.xmlsoap.org/wsdl/soap/",
            "soap12", "http://schemas.xmlsoap.org/wsdl/soap12/",
            "http", "http://schemas.xmlsoap.org/wsdl/http/",
            "s", CommonConstants.SERVICE_ELEMENT_NAMESPACE,
    };

    /**
     * Evaluates an XPath expression against the given root element with the WSDL
     * namespace prefixes registered, returning the matching elements.
     *
     * @param expression the XPath expression to evaluate
     * @param root       the element the expression is evaluated against
     * @return matching OMElements
     * @throws Exception if the expression is invalid or evaluation fails
     */
    private static List<OMElement> evaluateXPathToElements(String expression,
                                                           OMElement root) throws Exception {
        String[] nsPrefixes = wsdlPrefixes;
        AXIOMXPath xpathExpression = new AXIOMXPath(expression);
        // The array holds prefix/URI pairs; j is advanced twice per iteration
        // (once in the addNamespace call, once in the loop update) on purpose.
        for (int j = 0; j < nsPrefixes.length; j ++) {
            xpathExpression.addNamespace(nsPrefixes[j++], nsPrefixes[j]);
        }
        return (List<OMElement>)xpathExpression.selectNodes(root);
    }

    /**
     * Parses an XML string into an AXIOM element.
     *
     * @param content XML text to parse
     * @return the document element of the parsed content
     * @throws Exception if the StAX parser cannot be initialized or parsing fails
     */
    private static OMElement buildOMElement(String content) throws Exception {
        XMLStreamReader parser;
        try {
            parser = XMLInputFactory.newInstance().createXMLStreamReader(new StringReader(content));
        } catch (XMLStreamException e) {
            String msg = "Error in initializing the parser to build the OMElement.";
            throw new Exception(msg, e);
        }
        //create the builder
        StAXOMBuilder builder = new StAXOMBuilder(parser);
        //get the root element (in this case the envelope)
        return builder.getDocumentElement();
    }

    /**
     * Saves an endpoint under the given environment. The absolute endpoint path comes
     * from getEndpointLocation; the relative path is environment + derived URL path.
     *
     * @param context        the handler request context
     * @param registry       the user registry in which the endpoint is stored
     * @param url            the endpoint URL
     * @param associatedPath path of the artifact (service/WSDL) the endpoint belongs to
     * @param properties     properties to stamp on the endpoint resource
     * @param systemRegistry system registry used for creating base collections
     * @param environment    base environment path
     * @throws RegistryException on any registry failure
     */
    private static void saveEndpoint(RequestContext context, Registry registry, String url,
                                     String associatedPath, Map<String, String> properties,
                                     Registry systemRegistry, String environment) throws RegistryException {
        String pathExpression = getEndpointLocation(context, url, systemRegistry, environment);
        String urlToPath = deriveEndpointFromUrl(url);
        String endpointAbsoluteBasePath = RegistryUtils.getAbsolutePath(registry.getRegistryContext(),
                org.wso2.carbon.registry.core.RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH
                        + environment);
        if (!systemRegistry.resourceExists(endpointAbsoluteBasePath)) {
            // Make sure the environment's base collection exists before writing into it.
            systemRegistry.put(endpointAbsoluteBasePath, systemRegistry.newCollection());
        }
        String relativePath = environment + urlToPath;
        String endpointAbsolutePath = pathExpression;
        saveEndpointValues(context, registry, url, associatedPath, properties, systemRegistry,
                relativePath, endpointAbsolutePath);
    }

    /**
     * Version-aware endpoint save. If any supplied dependency path already matches the
     * derived endpoint (any x.y.z version segment), that existing path is reused;
     * otherwise a new versioned path is built from environment + prefix + version + name.
     *
     * @param context        the handler request context
     * @param registry       the user registry in which the endpoint is stored
     * @param url            the endpoint URL
     * @param associatedPath path of the artifact the endpoint belongs to
     * @param properties     properties to stamp on the endpoint resource
     * @param systemRegistry system registry used for creating base collections
     * @param environment    base environment path
     * @param dependencies   existing dependency paths to match against
     * @param version        version segment for a newly created endpoint path
     * @throws RegistryException on any registry failure
     */
    private static void saveEndpoint(RequestContext context, Registry registry, String url,
                                     String associatedPath, Map<String, String> properties,
                                     Registry systemRegistry, String environment,
                                     List<String> dependencies, String version) throws RegistryException {
        String urlToPath = deriveEndpointFromUrl(url);
        String pathExpression = getEndpointLocation(context, url, systemRegistry, environment);
        String endpointAbsoluteBasePath = RegistryUtils.getAbsolutePath(registry.getRegistryContext(),
                environment);
        if (!systemRegistry.resourceExists(endpointAbsoluteBasePath)) {
            systemRegistry.put(endpointAbsoluteBasePath, systemRegistry.newCollection());
        }
        String prefix = urlToPath.substring(0,urlToPath.lastIndexOf(RegistryConstants.PATH_SEPARATOR) +1 );
        String name = urlToPath.replace(prefix,"");
        // NOTE(review): "[\\d].[\\d].[\\d]" matches one digit per segment and '.' is an
        // unescaped regex wildcard — multi-digit versions (e.g. 1.10.0) will not match;
        // confirm whether that is intended.
        String regex = endpointAbsoluteBasePath + prefix + "[\\d].[\\d].[\\d]"
                + RegistryConstants.PATH_SEPARATOR + name;
        for (String dependency : dependencies) {
            if(dependency.matches(regex)){
                String newRelativePath =
     * In the non-OSGi environment it will execute the else
     * condition
     * @param context Request Context
     * @param url Endpoint URL
     * @param systemRegistry Registry instance
     * @param environment Environment
     * @return Populated Registry Path
     * @throws RegistryException
     */
    private static String getEndpointLocation(RequestContext context, String url,
                                              Registry systemRegistry, String environment)
            throws RegistryException {
        if (Utils.getRxtService() != null) {
            // OSGi path: derive the storage path from the endpoint RXT's path expression,
            // substituting name / version / namespace placeholders.
            String pathExpression =
                    Utils.getRxtService().getStoragePath(CommonConstants.ENDPOINT_MEDIA_TYPE);
            pathExpression = CommonUtil
                    .replaceExpressionOfPath(pathExpression, "name", deriveEndpointNameWithNamespaceFromUrl(url));
            pathExpression = CommonUtil
                    .replaceExpressionOfPath(pathExpression, "version", endpointVersion);
            pathExpression = CommonUtil.getPathFromPathExpression(pathExpression,
                    context.getResource().getProperties(), null);
            String namespace = deriveEndpointNamespaceFromUrl(url).replace("//", "/");
            pathExpression = CommonUtil.replaceExpressionOfPath(pathExpression, "namespace", namespace);
            pathExpression = pathExpression.replace("//", "/");
            pathExpression = RegistryUtils.getAbsolutePath(context.getRegistryContext(),
                    pathExpression.replace("//", "/"));
            String endPointPath = pathExpression;
            /**
             * Fix for the REGISTRY-3052 : validation is to check the whether this invoked by ZIPWSDLMediaTypeHandler
             * Setting the registry and absolute paths to current session to avoid incorrect resource path entry in REG_LOG table
             */
            if (CurrentSession.getLocalPathMap() != null
                    && !Boolean.valueOf(CurrentSession.getLocalPathMap().get(CommonConstants.ARCHIEVE_UPLOAD))) {
                endPointPath = CommonUtil.getRegistryPath(context.getRegistry().getRegistryContext(),pathExpression);
                if (log.isDebugEnabled()) {
                    log.debug("Saving current session local paths, key: " + endPointPath
                            + " | value: " + pathExpression);
                }
                CurrentSession.getLocalPathMap().put(endPointPath, pathExpression);
            }
            return endPointPath;
        } else {
            // Non-OSGi path: build the location directly from the governance base path.
            String urlToPath = deriveEndpointFromUrl(url);
            String endpointAbsoluteBasePath = RegistryUtils.getAbsolutePath(context.getRegistryContext(),
                    org.wso2.carbon.registry.core.RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH
                            + environment);
            if (!systemRegistry.resourceExists(endpointAbsoluteBasePath)) {
                systemRegistry.put(endpointAbsoluteBasePath, systemRegistry.newCollection());
            }
            String endpointAbsolutePath = endpointAbsoluteBasePath + urlToPath;
            return endpointAbsolutePath;
        }
    }

    /**
     * Update endpoint content(XML structure) with properties coming though context, We assume those fields only in
     * overview section.
     *
     * @param context Request Context
     * @param content Current content of the endpoint
     * @return updated endpoint content
     */
    private static String updateEndpointContent(RequestContext context, String content) {
        try {
            OMElement serviceElement = buildOMElement(content);
            String pathExpression =
                    Utils.getRxtService().getStoragePath(CommonConstants.ENDPOINT_MEDIA_TYPE);
            CommonUtil.getPathFromPathExpression(pathExpression,
                    context.getResource().getProperties(), serviceElement);
            return serviceElement.toString();
        } catch (Exception e) {
            // If exception occurred while generating OMElement, then will return the original content
        }
        return content;
    }

    /**
     * Writes the endpoint resource (creating or updating its content, UUID, media type
     * and properties) and links it to the associated artifact in both directions.
     *
     * @param context              the handler request context
     * @param registry             the user registry in which the endpoint is stored
     * @param url                  the endpoint URL
     * @param associatedPath       artifact path to associate with the endpoint
     * @param properties           properties to stamp on the endpoint resource (may be null)
     * @param systemRegistry       system registry (currently unused here; kept for signature compatibility)
     * @param relativePath         relative registry path of the endpoint
     * @param endpointAbsolutePath absolute registry path of the endpoint
     * @throws RegistryException on any registry failure
     */
    private static void saveEndpointValues(RequestContext context,Registry registry, String url,
                                           String associatedPath , Map<String, String> properties,
                                           Registry systemRegistry, String relativePath ,
                                           String endpointAbsolutePath) throws RegistryException {
        Resource resource;
        String endpointId = null;
        if (registry.resourceExists(endpointAbsolutePath)) {
            // Existing endpoint: refresh content only when it actually changed.
            resource = registry.get(endpointAbsolutePath);
            endpointId = resource.getUUID();
            String existingContent;
            String newContent = updateEndpointContent(context, getEndpointContentWithOverview(url,
                    endpointAbsolutePath, ((ResourceImpl) resource).getName(),endpointVersion));
            if (resource.getContent() != null) {
                existingContent = new String((byte[]) (resource.getContent()));
                if (!existingContent.equals(newContent)) {
                    resource.setContent(RegistryUtils.encodeString(newContent));
                }
            } else {
                resource.setContent(RegistryUtils.encodeString(newContent));
            }
        } else {
            // Brand new endpoint resource.
            resource = registry.newResource();
            resource.setContent(RegistryUtils.encodeString(updateEndpointContent(context,
                    getEndpointContentWithOverview(url,endpointAbsolutePath,
                            deriveEndpointNameWithNamespaceFromUrl(url),endpointVersion))));
        }
        boolean endpointIdCreated = false;
        if (endpointId == null) {
            endpointIdCreated = true;
            endpointId = UUID.randomUUID().toString();
            resource.setUUID(endpointId);
        }
//        CommonUtil.addGovernanceArtifactEntryWithRelativeValues(
//                systemRegistry, endpointId, relativePath);
        boolean propertiesChanged = false;
        if (properties != null) {
            for (Map.Entry<String, String> e : properties.entrySet()) {
                propertiesChanged = true;
                resource.setProperty(e.getKey(), e.getValue());
            }
        }
        if (endpointIdCreated || propertiesChanged) {
            // this will be definitely false for a brand new resource
            resource.setMediaType(endpointMediaType);
            registry.put(endpointAbsolutePath, resource);
            // we need to create a version here.
        }
        registry.addAssociation(associatedPath, endpointAbsolutePath, CommonConstants.DEPENDS);
        registry.addAssociation(endpointAbsolutePath, associatedPath, CommonConstants.USED_BY);
    }

    /**
     * Adds an "environment:url" endpoint entry to the stored service artifact and
     * persists the updated service resource, unless an identical entry already exists.
     *
     * @param registry    registry holding the service
     * @param servicePath registry path of the service
     * @param endpointUrl the endpoint URL to record
     * @param endpointEnv the environment key prefixed to the URL
     * @throws RegistryException if the service content cannot be parsed or saved
     */
    public static void addEndpointToService(Registry registry, String servicePath,
                                            String endpointUrl, String endpointEnv)
            throws RegistryException {
        Resource serviceResource = registry.get(servicePath);
        byte[] serviceBytes = (byte[])serviceResource.getContent();
        String serviceContent = RegistryUtils.decodeBytes(serviceBytes);
        OMElement serviceElement;
        try {
            serviceElement = buildOMElement(serviceContent);
        } catch (Exception e) {
            String msg = "Failed building the service element. " + servicePath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        OMElement serviceEndpointElement;
        OMNamespace namespace = OMAbstractFactory.getOMFactory().createOMNamespace(
                CommonConstants.SERVICE_ELEMENT_NAMESPACE, null);
        try {
            List<OMElement> endpointElements = evaluateXPathToElements(
                    SERVICE_ENDPOINT_EXPR, serviceElement);
            if (endpointElements.size() == 0) {
                // we need to create the element.
                serviceEndpointElement = OMAbstractFactory.getOMFactory().createOMElement(
                        SERVICE_ENDPOINTS_ELEMENT, namespace);
                serviceElement.addChild(serviceEndpointElement);
            } else {
                serviceEndpointElement = endpointElements.get(0);
            }
        } catch (Exception e) {
            String msg = "Error in getting the endpoint element of the service. "
                    + "service path: " + servicePath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        Iterator it = serviceEndpointElement.getChildElements();
        List<String> currentEndpoints = new ArrayList<String>();
        while(it.hasNext()){
            currentEndpoints.add(((OMElement) it.next()).getText());
        }
        if(!currentEndpoints.contains(endpointEnv + ":" + endpointUrl)){
            OMElement entryElement = OMAbstractFactory.getOMFactory().createOMElement(
                    SERVICE_ENDPOINTS_ENTRY_ELEMENT, namespace);
            entryElement.setText(endpointEnv + ":" + endpointUrl);
            serviceEndpointElement.addChild(entryElement);
            // now we are saving it to the registry.
            String serviceElementStr = serviceElement.toString();
            serviceResource.setContent(RegistryUtils.encodeString(serviceElementStr));
            registry.put(servicePath, serviceResource);
        }
    }

    /**
     * In-memory variant of addEndpointToService: adds an "environment:url" entry to
     * the given service element (if not already present) and returns the element.
     * Nothing is written to the registry by this overload.
     *
     * @param serviceElement parsed service artifact content, mutated in place
     * @param endpointUrl    the endpoint URL to record
     * @param endpointEnv    the environment key prefixed to the URL
     * @return the (possibly modified) service element
     * @throws RegistryException if the endpoints element cannot be located or created
     */
    public static OMElement addEndpointToService(OMElement serviceElement, String endpointUrl,
                                                 String endpointEnv) throws RegistryException {
        OMElement serviceEndpointElement;
        OMNamespace namespace = OMAbstractFactory.getOMFactory().createOMNamespace(
                CommonConstants.SERVICE_ELEMENT_NAMESPACE, null);
        try {
            List<OMElement> endpointElements =
                    evaluateXPathToElements(SERVICE_ENDPOINT_EXPR, serviceElement);
            if (endpointElements.size() == 0) {
                // we need to create the element.
                serviceEndpointElement = OMAbstractFactory.getOMFactory().createOMElement(
                        SERVICE_ENDPOINTS_ELEMENT, namespace);
                serviceElement.addChild(serviceEndpointElement);
            } else {
                serviceEndpointElement = endpointElements.get(0);
            }
        } catch (Exception e) {
            String msg = "Error in getting the endpoint element of the service.";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        Iterator it = serviceEndpointElement.getChildElements();
        List<String> currentEndpoints = new ArrayList<String>();
        while (it.hasNext()) {
            currentEndpoints.add(((OMElement) it.next()).getText());
        }
        if (!currentEndpoints.contains(endpointEnv + ":" + endpointUrl)) {
            OMElement entryElement = OMAbstractFactory.getOMFactory().createOMElement(
                    SERVICE_ENDPOINTS_ENTRY_ELEMENT, namespace);
            entryElement.setText(endpointEnv + ":" + endpointUrl);
            serviceEndpointElement.addChild(entryElement);
            // now we are saving it to the registry.
            return serviceElement;
        }
        return serviceElement;
    }

    /**
     * Removes the "environment:url" endpoint entry from the stored service artifact
     * and persists the updated service resource. Only the first matching entry is
     * removed.
     *
     * @param registry    registry holding the service
     * @param servicePath registry path of the service
     * @param endpointUrl the endpoint URL to remove
     * @param endpointEnv the environment key prefixed to the URL
     * @throws RegistryException if the service content cannot be parsed or saved
     */
    public static void removeEndpointFromService(Registry registry, String servicePath,
                                                 String endpointUrl, String endpointEnv)
            throws RegistryException {
        Resource serviceResource = registry.get(servicePath);
        byte[] serviceBytes = (byte[])serviceResource.getContent();
        String serviceContent = RegistryUtils.decodeBytes(serviceBytes);
        OMElement serviceElement;
        try {
            serviceElement = buildOMElement(serviceContent);
        } catch (Exception e) {
            String msg = "Failed building the service element. " + servicePath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        OMElement serviceEndpointElement;
        OMNamespace namespace = OMAbstractFactory.getOMFactory().createOMNamespace(
                CommonConstants.SERVICE_ELEMENT_NAMESPACE, null);
        try {
            List<OMElement> endpointElements = evaluateXPathToElements(
                    SERVICE_ENDPOINT_EXPR, serviceElement);
            if (endpointElements.size() == 0) {
                // we need to create the element.
                serviceEndpointElement = OMAbstractFactory.getOMFactory().createOMElement(
                        SERVICE_ENDPOINTS_ELEMENT, namespace);
                serviceElement.addChild(serviceEndpointElement);
            } else {
                serviceEndpointElement = endpointElements.get(0);
            }
        } catch (Exception e) {
            String msg = "Error in getting the endpoint element of the service. "
                    + "service path: " + servicePath + ".";
            log.error(msg, e);
            throw new RegistryException(msg, e);
        }
        Iterator it = serviceEndpointElement.getChildElements();
        while(it.hasNext()){
            OMElement next = (OMElement) it.next();
            if (next.getText().equals(endpointEnv + ":" + endpointUrl)) {
                next.detach();
                // now we are saving it to the registry.
                String serviceElementStr = serviceElement.toString();
                serviceResource.setContent(RegistryUtils.encodeString(serviceElementStr));
                registry.put(servicePath, serviceResource);
                break;
            }
        }
    }

    /**
     * Returns an endpoint path for the url without the starting '/'
     * @param url the endpoint url
     * @return the path
     */
    public static String deriveEndpointFromUrl(String url) {
        if (StringUtils.isBlank(url)) {
            throw new IllegalArgumentException("Invalid arguments supplied for derive endpoint name from url.");
        }
        // Strip the query string, then rebuild everything up to the last path segment.
        String[] temp = url.split("[?]")[0].split("/");
        StringBuffer sb = new StringBuffer();
        for(int i=0; i<temp.length-1; i++){
            sb.append(temp[i]).append("/");
        }
        String urlToPath = CommonUtil.derivePathFragmentFromNamespace(sb.toString());
        // excluding extra slashes.
        if (urlToPath.length() > 1) {
            urlToPath = urlToPath.substring(1, urlToPath.length() - 1);
        }
        urlToPath += "/" + deriveEndpointNameFromUrl(url);
        return urlToPath;
    }

    /**
     * Returns an endpoint path for the url without the starting '/'
     * @param url the endpoint url
     * @return the path
     */
    public static String deriveEndpointNamespaceFromUrl(String url) {
        if (StringUtils.isBlank(url)) {
            throw new IllegalArgumentException("Invalid arguments supplied for derive endpoint name from url.");
        }
        String[] temp = url.split("[?]")[0].split("/");
        StringBuffer sb = new StringBuffer();
        for(int i=0; i<temp.length-1; i++){
            sb.append(temp[i]).append("/");
        }
        String urlToPath = CommonUtil.derivePathFragmentFromNamespace(sb.toString());
        // excluding extra slashes.
        if (urlToPath.length() > 1) {
            urlToPath = urlToPath.substring(1, urlToPath.length() - 1);
        }
        return urlToPath;
    }

    /**
     * Returns an endpoint name with ENDPOINT_RESOURCE_PREFIX
     *
     * @param url the endpoint url
     * @return (ENDPOINT_RESOURCE_PREFIX + name) populated resource name
     */
    public static String deriveEndpointNameFromUrl(String url) {
        if (StringUtils.isBlank(url)) {
            throw new IllegalArgumentException("Invalid arguments supplied for derive endpoint name from url.");
        }
        String tempURL = url;
        if (tempURL.startsWith("jms:/")) {
            // JMS URLs carry their routing data in the query string; drop it.
            tempURL = tempURL.split("[?]")[0];
        }
        // Sanitize characters that are not valid in registry resource names.
        String name = tempURL.split("/")[tempURL.split("/").length - 1].replace(".", "-").
                replace("=", "-").replace("@", "-").replace("#", "-").replace("~", "-");
        return ENDPOINT_RESOURCE_PREFIX + name;
    }

    /**
     * Returns an endpoint name with namespace and ENDPOINT_RESOURCE_PREFIX
     *
     * @param url the endpoint url
     * @return (ENDPOINT_RESOURCE_PREFIX + namespace + name) populated resource name
     */
    public static String deriveEndpointNameWithNamespaceFromUrl(String url) {
        if (StringUtils.isBlank(url)) {
            throw new IllegalArgumentException("Invalid arguments supplied for derive endpoint name from url.");
        }
        String tempURL = url;
        if (tempURL.startsWith("jms:/")) {
            tempURL = tempURL.split("[?]")[0];
        }
        String name = tempURL.split("/")[tempURL.split("/").length - 1].replace(".", "-").
                replace("=", "-").replace("@", "-").replace("#", "-").replace("~", "-");
        String namespace = deriveEndpointNamespaceFromUrl(url).replace("//", "/");
        namespace = namespace.replace("/", ".");
        namespace += "-";
        if (isIncludeNamespaceInName()){
            return ENDPOINT_RESOURCE_PREFIX + namespace +name;
        } else {
            return ENDPOINT_RESOURCE_PREFIX + name;
        }
    }

    /**
     * Create the endpoint content
     * This method is replaced by getEndpointContentWithOverview() below.
     *
     * @param endpoint endpoint URI
     * @param path endpoint location in the registry
     * @return the serialized synapse endpoint XML
     * @throws RegistryException
     */
    @Deprecated
    public static String getEndpointContent(String endpoint, String path) throws RegistryException {
        if (StringUtils.isBlank(endpoint) || StringUtils.isBlank(path)) {
            throw new IllegalArgumentException("Invalid arguments supplied for derive endpoint name from url.");
        }
        path = setFullPath(path);
        OMFactory factory = OMAbstractFactory.getOMFactory();
        OMElement endpointElement = factory
                .createOMElement(new QName(ENDPOINT_ELEMENT_NAMESPACE, SYNAPSE_ENDPOINT, null));
        endpointElement.addAttribute(SYNAPSE_ENDPOINT_NAME_ATTRIBUTE, path, null);
        OMElement address = factory.createOMElement(new QName(SYNAPSE_ENDPOINT_ADDRESS));
        address.addAttribute(SYNAPSE_ENDPOINT_ADDRESS_URI_ATTRIBUTE, endpoint, null);
        endpointElement.addChild(address);
        return endpointElement.toString();
    }

    /**
     * Create the endpoint content with name and version
     *
     * @param endpoint endpoint URI
     * @param path     endpoint location in the registry
     * @param name     resource name
     * @param version  resource version
     * @return OMElement.toString()
     * @throws RegistryException
     */
    public static String getEndpointContentWithOverview(String endpoint, String path,
                                                        String name, String version)
            throws RegistryException {
        if (isArgumentsNull(endpoint, path, name, version)) {
            throw new IllegalArgumentException("Invalid arguments supplied for content creation.");
        }
        path = setFullPath(path);
        OMFactory factory = OMAbstractFactory.getOMFactory();
        OMElement endpointElement = factory.createOMElement(new QName(SYNAPSE_ENDPOINT));
        // Workaround for manually set xml namespace value.
        endpointElement.addAttribute(ENDPOINT_NAMESPACE_ATTRIBUTE, ENDPOINT_ELEMENT_NAMESPACE, null);
        //endpointElement.addAttribute(SYNAPSE_ENDPOINT_NAME_ATTRIBUTE, path, null);
        OMElement endpointElementOverview = factory.createOMElement(new QName(SYNAPSE_ENDPOINT_OVERVIEW));
        OMElement overviewName = factory.createOMElement(new QName(SYNAPSE_ENDPOINT_NAME));
        overviewName.setText(name);
        OMElement overviewVersion = factory.createOMElement(new QName(SYNAPSE_ENDPOINT_VERSION));
        overviewVersion.setText(version);
        OMElement overviewAddress = factory.createOMElement(new QName(SYNAPSE_ENDPOINT_ADDRESS));
        overviewAddress.setText(endpoint);
        endpointElementOverview.addChild(overviewName);
        endpointElementOverview.addChild(overviewVersion);
        endpointElementOverview.addChild(overviewAddress);
        endpointElement.addChild(endpointElementOverview);
        return endpointElement.toString();
    }

    /**
     * Maps an absolute governance path onto the short "gov/" form used inside the
     * endpoint content.
     *
     * @param path endpoint location in the registry
     * @return path simplified concatenated path
     */
    private static String setFullPath(String path) {
        if (path.startsWith(RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH)) {
            path = "gov/" + path.substring((RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH
                    + ENDPOINT_DEFAULT_LOCATION).length());
        } else {
            path = "gov/" + path;
        }
        return path;
    }

    /**
     * Extract endpoint URL from content
     *
     * @param endpointContent endpoint content
     * @return addressElement.getText() String endpoint content
     * @throws RegistryException
     */
    public static String deriveEndpointFromContent(String endpointContent) throws RegistryException {
        if (StringUtils.isBlank(endpointContent)) {
            throw new IllegalArgumentException("Invalid arguments supplied for derive endpoint from content.");
        }
        try {
            OMElement endpointElement = AXIOMUtil.stringToOM(endpointContent);
            OMElement overviewElement = endpointElement
                    .getFirstChildWithName(new QName(ENDPOINT_ELEMENT_NAMESPACE, SYNAPSE_ENDPOINT_OVERVIEW));
            OMElement addressElement = overviewElement
                    .getFirstChildWithName(new QName(ENDPOINT_ELEMENT_NAMESPACE, SYNAPSE_ENDPOINT_ADDRESS));
            return addressElement.getText();
        } catch (XMLStreamException e) {
            throw new RegistryException("Invalid endpoint content", e);
        }
    }

    /**
     * Extract endpoint version from content
     *
     * @param endpointContent endpoint content
     * @return addressElement.getText() String endpoint version
     * @throws RegistryException
     */
    public static String deriveVersionFromContent(String endpointContent) throws RegistryException {
        if (StringUtils.isBlank(endpointContent)) {
            throw new IllegalArgumentException("Invalid arguments supplied for derive endpoint version from content.");
        }
        try {
            OMElement endpointElement = AXIOMUtil.stringToOM(endpointContent);
            OMElement overviewElement = endpointElement
                    .getFirstChildWithName(new QName(ENDPOINT_ELEMENT_NAMESPACE, SYNAPSE_ENDPOINT_OVERVIEW));
            OMElement addressElement = overviewElement
                    .getFirstChildWithName(new QName(ENDPOINT_ELEMENT_NAMESPACE, SYNAPSE_ENDPOINT_VERSION));
            return addressElement.getText();
        } catch (XMLStreamException e) {
            throw new RegistryException("Invalid endpoint content", e);
        }
    }

    /**
     * Extract endpoint name from content
     *
     * @param endpointContent endpoint content
     * @return addressElement.getText() String endpoint name
     * @throws RegistryException
     */
    public static String deriveNameFromContent(String endpointContent) throws RegistryException {
        if (StringUtils.isBlank(endpointContent)) {
            throw new IllegalArgumentException("Invalid arguments supplied for derive endpoint name from content.");
        }
        try {
            OMElement endpointElement = AXIOMUtil.stringToOM(endpointContent);
            OMElement overviewElement = endpointElement
                    .getFirstChildWithName(new QName(ENDPOINT_ELEMENT_NAMESPACE, SYNAPSE_ENDPOINT_OVERVIEW));
            OMElement addressElement = overviewElement
                    .getFirstChildWithName(new QName(ENDPOINT_ELEMENT_NAMESPACE, SYNAPSE_ENDPOINT_NAME));
            return addressElement.getText();
        } catch (XMLStreamException e) {
            throw new RegistryException("Invalid endpoint content", e);
        }
    }

    /**
     * Check whether all the parameters are null or not
     * "null" is considered as a valid string.
     *
     * @param value1,value2,value3,value4 argument String values
     * @return boolean value of isBlank()
     */
    private static boolean isArgumentsNull(String value1, String value2, String value3, String value4) {
        return StringUtils.isBlank(value1) || StringUtils.isBlank(value2)
                || StringUtils.isBlank(value3) || StringUtils.isBlank(value4);
    }

    /**
     * Parses endpoint content into an AXIOM element.
     *
     * @param endpointContent endpoint XML content
     * @return the parsed element
     * @throws RegistryException if the content is blank or not well-formed XML
     */
    public static OMElement deriveOMElementContent(String endpointContent) throws RegistryException {
        if (StringUtils.isBlank(endpointContent)) {
            throw new IllegalArgumentException("Invalid arguments supplied for derive endpoint name from content.");
        }
        try {
            OMElement endpointElement = AXIOMUtil.stringToOM(endpointContent);
            return endpointElement;
        } catch (XMLStreamException e) {
            throw new RegistryException("Invalid endpoint content", e);
        }
    }
}
package com.gdgkoreaandroid.multiscreencodelab.tv;

import android.app.Activity;
import android.media.MediaPlayer;
import android.media.MediaPlayer.OnErrorListener;
import android.media.MediaPlayer.OnPreparedListener;
import android.os.Bundle;
import android.os.Handler;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.VideoView;

import com.gdgkoreaandroid.multiscreencodelab.R;
import com.gdgkoreaandroid.multiscreencodelab.data.Movie;
import com.gdgkoreaandroid.multiscreencodelab.data.MovieList;

import java.util.Timer;
import java.util.TimerTask;

/**
 * FullScreen video player activity with basic media control capabilities.
 *
 * <p>The movie to play is identified by {@link MovieList#ARG_ITEM_ID} in the launching
 * intent; optional extras control whether playback starts immediately and from which
 * position. On-screen controls auto-hide after {@link #HIDE_CONTROLLER_TIME} ms.
 */
public class TvPlayerActivity extends Activity {

    private static final String TAG = "TvPlayerActivity";

    /** Milliseconds before the on-screen controls auto-hide. */
    private static final int HIDE_CONTROLLER_TIME = 5000;
    /** Initial delay before the seek-bar update timer fires. */
    private static final int SEEKBAR_DELAY_TIME = 100;
    /** Period of the seek-bar update timer. */
    private static final int SEEKBAR_INTERVAL_TIME = 1000;
    /** Minimum DPAD scrub step in milliseconds. */
    private static final int MIN_SCRUB_TIME = 3000;
    /** A scrub step is duration / this divisor (but never below MIN_SCRUB_TIME). */
    private static final int SCRUB_SEGMENT_DIVISOR = 30;

    // Media-control bar geometry, expressed as fractions of the screen size.
    private static final double MEDIA_BAR_TOP_MARGIN = 0.8;
    private static final double MEDIA_BAR_RIGHT_MARGIN = 0.2;
    private static final double MEDIA_BAR_BOTTOM_MARGIN = 0.0;
    private static final double MEDIA_BAR_LEFT_MARGIN = 0.2;
    private static final double MEDIA_BAR_HEIGHT = 0.1;
    private static final double MEDIA_BAR_WIDTH = 0.9;

    private VideoView mVideoView;
    private TextView mStartText;
    private TextView mEndText;
    private SeekBar mSeekbar;
    private ImageView mPlayPause;
    private ProgressBar mLoading;
    private View mControllers;
    private Timer mSeekbarTimer;
    private Timer mControllersTimer;
    private PlaybackState mPlaybackState;
    private final Handler mHandler = new Handler();
    private boolean mControllersVisible;     // fixed typo: was mControlersVisible
    private int mDuration;
    private DisplayMetrics mMetrics;

    /*
     * List of various states that we can be in
     */
    public static enum PlaybackState {
        PLAYING, PAUSED, BUFFERING, IDLE
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_player);

        mMetrics = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(mMetrics);

        loadViews();
        setupController();
        setupControlsCallbacks();
        startVideoPlayer();
        updateMetadata();
    }

    @Override
    protected void onPause() {
        super.onPause();
        Log.d(TAG, "onPause() was called");
        if (null != mSeekbarTimer) {
            mSeekbarTimer.cancel();
            mSeekbarTimer = null;
        }
        if (null != mControllersTimer) {
            mControllersTimer.cancel();
        }
        mVideoView.pause();
        mPlaybackState = PlaybackState.PAUSED;
        updatePlayButton(PlaybackState.PAUSED);
    }

    @Override
    protected void onDestroy() {
        stopControllersTimer();
        stopSeekBarTimer();
        super.onDestroy();
    }

    /**
     * Reads the movie id and playback options from the launching intent and either
     * starts playback immediately or prepares the player in a paused state.
     */
    private void startVideoPlayer() {
        Bundle extras = getIntent().getExtras();
        long movieId = getIntent().getLongExtra(MovieList.ARG_ITEM_ID, MovieList.INVALID_ID);
        Movie movie = MovieList.getMovie(movieId);
        // FIX: getExtras() may be null when the activity is launched without extras;
        // the original dereferenced it unconditionally and could crash with an NPE.
        if (movie == null || extras == null) {
            return;
        }
        boolean shouldStartPlayback =
                extras.getBoolean(getResources().getString(R.string.should_start));
        int startPosition = extras.getInt(getResources().getString(R.string.start_position), 0);
        mVideoView.setVideoPath(movie.getVideoUrl());
        if (shouldStartPlayback) {
            mPlaybackState = PlaybackState.PLAYING;
            updatePlayButton(mPlaybackState);
            if (startPosition > 0) {
                mVideoView.seekTo(startPosition);
            }
            mVideoView.start();
            mPlayPause.requestFocus();
            startControllersTimer();
        } else {
            updatePlaybackLocation();
            mPlaybackState = PlaybackState.PAUSED;
            updatePlayButton(mPlaybackState);
        }
    }

    /** Restarts or stops the auto-hide timer depending on the playback state. */
    private void updatePlaybackLocation() {
        if (mPlaybackState == PlaybackState.PLAYING || mPlaybackState == PlaybackState.BUFFERING) {
            startControllersTimer();
        } else {
            stopControllersTimer();
        }
    }

    /** Seeks to {@code position} (ms) and resumes playback with fresh timers. */
    private void play(int position) {
        startControllersTimer();
        mVideoView.seekTo(position);
        mVideoView.start();
        restartSeekBarTimer();
    }

    /** Cancels the seek-bar update timer, if running. */
    private void stopSeekBarTimer() {
        Log.d(TAG, "Stopped TrickPlay Timer");
        if (null != mSeekbarTimer) {
            mSeekbarTimer.cancel();
            // FIX: drop the cancelled timer so a stale, dead Timer is never kept around.
            mSeekbarTimer = null;
        }
    }

    /** Cancels any running seek-bar timer and schedules a fresh one. */
    private void restartSeekBarTimer() {
        stopSeekBarTimer();
        mSeekbarTimer = new Timer();
        mSeekbarTimer.scheduleAtFixedRate(new UpdateSeekbarTask(), SEEKBAR_DELAY_TIME,
                SEEKBAR_INTERVAL_TIME);
    }

    /** Cancels the controllers auto-hide timer, if running. */
    private void stopControllersTimer() {
        if (null != mControllersTimer) {
            mControllersTimer.cancel();
            // FIX: drop the cancelled timer (consistent with stopSeekBarTimer()).
            mControllersTimer = null;
        }
    }

    /** (Re)starts the timer that hides the controls after HIDE_CONTROLLER_TIME ms. */
    private void startControllersTimer() {
        if (null != mControllersTimer) {
            mControllersTimer.cancel();
        }
        mControllersTimer = new Timer();
        mControllersTimer.schedule(new HideControllersTask(), HIDE_CONTROLLER_TIME);
    }

    /** Shows or hides the media-control bar. */
    private void updateControllersVisibility(boolean show) {
        if (show) {
            mControllers.setVisibility(View.VISIBLE);
        } else {
            mControllers.setVisibility(View.INVISIBLE);
        }
    }

    /** Timer task that hides the controls on the UI thread. */
    private class HideControllersTask extends TimerTask {
        @Override
        public void run() {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    updateControllersVisibility(false);
                    mControllersVisible = false;
                }
            });
        }
    }

    /** Timer task that refreshes the seek bar position on the UI thread. */
    private class UpdateSeekbarTask extends TimerTask {
        @Override
        public void run() {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    int currentPos = mVideoView.getCurrentPosition();
                    updateSeekbar(currentPos, mDuration);
                }
            });
        }
    }

    /** Positions and sizes the media-control bar relative to the screen. */
    private void setupController() {
        int w = (int) (mMetrics.widthPixels * MEDIA_BAR_WIDTH);
        int h = (int) (mMetrics.heightPixels * MEDIA_BAR_HEIGHT);
        int marginLeft = (int) (mMetrics.widthPixels * MEDIA_BAR_LEFT_MARGIN);
        int marginTop = (int) (mMetrics.heightPixels * MEDIA_BAR_TOP_MARGIN);
        int marginRight = (int) (mMetrics.widthPixels * MEDIA_BAR_RIGHT_MARGIN);
        int marginBottom = (int) (mMetrics.heightPixels * MEDIA_BAR_BOTTOM_MARGIN);

        RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(w, h);
        lp.setMargins(marginLeft, marginTop, marginRight, marginBottom);
        mControllers.setLayoutParams(lp);
        mStartText.setText(getResources().getString(R.string.init_text));
        mEndText.setText(getResources().getString(R.string.init_text));
    }

    /** Registers error/prepared listeners on the video view. */
    private void setupControlsCallbacks() {
        mVideoView.setOnErrorListener(new OnErrorListener() {
            @Override
            public boolean onError(MediaPlayer mp, int what, int extra) {
                mVideoView.stopPlayback();
                mPlaybackState = PlaybackState.IDLE;
                return false;
            }
        });

        mVideoView.setOnPreparedListener(new OnPreparedListener() {
            @Override
            public void onPrepared(MediaPlayer mp) {
                Log.d(TAG, "onPrepared is reached");
                mDuration = mp.getDuration();
                mEndText.setText(formatMillis(mDuration));
                mSeekbar.setMax(mDuration);
                restartSeekBarTimer();
            }
        });
    }

    /**
     * Handles DPAD keys: left/right scrub by 1/SCRUB_SEGMENT_DIVISOR of the duration
     * (at least MIN_SCRUB_TIME ms); any DPAD key also reveals the controls.
     */
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        int currentPos;
        int delta = mDuration / SCRUB_SEGMENT_DIVISOR;
        if (delta < MIN_SCRUB_TIME) {
            delta = MIN_SCRUB_TIME;
        }
        Log.v("keycode", "duration " + mDuration + " delta:" + delta);
        if (!mControllersVisible) {
            updateControllersVisibility(true);
        }

        switch (keyCode) {
            case KeyEvent.KEYCODE_DPAD_CENTER:
                return true;
            case KeyEvent.KEYCODE_DPAD_DOWN:
                return true;
            case KeyEvent.KEYCODE_DPAD_LEFT:
                currentPos = mVideoView.getCurrentPosition();
                currentPos -= delta;
                if (currentPos > 0) {
                    play(currentPos);
                }
                return true;
            case KeyEvent.KEYCODE_DPAD_RIGHT:
                currentPos = mVideoView.getCurrentPosition();
                currentPos += delta;
                if (currentPos < mDuration) {
                    play(currentPos);
                }
                return true;
            case KeyEvent.KEYCODE_DPAD_UP:
                return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    /** Pushes the current position/duration into the seek bar and time labels. */
    private void updateSeekbar(int position, int duration) {
        mSeekbar.setProgress(position);
        mSeekbar.setMax(duration);
        mStartText.setText(formatMillis(position));
        mEndText.setText(formatMillis(duration));
    }

    /** Swaps the play/pause/loading indicators to match {@code state}. */
    private void updatePlayButton(PlaybackState state) {
        switch (state) {
            case PLAYING:
                mLoading.setVisibility(View.INVISIBLE);
                mPlayPause.setVisibility(View.VISIBLE);
                mPlayPause.setImageDrawable(
                        getResources().getDrawable(R.drawable.ic_pause_playcontrol_normal));
                break;
            case PAUSED:
            case IDLE:
                mLoading.setVisibility(View.INVISIBLE);
                mPlayPause.setVisibility(View.VISIBLE);
                mPlayPause.setImageDrawable(
                        getResources().getDrawable(R.drawable.ic_play_playcontrol_normal));
                break;
            case BUFFERING:
                mPlayPause.setVisibility(View.INVISIBLE);
                mLoading.setVisibility(View.VISIBLE);
                break;
            default:
                break;
        }
    }

    private void updateMetadata() {
        // NOTE(review): only forces a redraw of the video surface; no metadata is
        // actually rendered here — confirm whether this stub is intentional.
        mVideoView.invalidate();
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return true;
    }

    /**
     * Formats time in milliseconds as [h:]mm:ss (hours omitted and unpadded when zero).
     *
     * @param millis duration in milliseconds
     * @return formatted time string
     */
    private String formatMillis(int millis) {
        int hr = millis / 3600000;
        millis %= 3600000;
        int min = millis / 60000;
        millis %= 60000;
        int sec = millis / 1000;
        // FIX: removed the always-true "min >= 0" branch and repeated string
        // concatenation; output is unchanged.
        StringBuilder result = new StringBuilder();
        if (hr > 0) {
            result.append(hr).append(':');
        }
        result.append(String.format("%02d:%02d", min, sec));
        return result.toString();
    }

    /** Looks up all views from the layout and wires the play/pause click handler. */
    private void loadViews() {
        mVideoView = (VideoView) findViewById(R.id.videoView);
        mStartText = (TextView) findViewById(R.id.startText);
        mEndText = (TextView) findViewById(R.id.endText);
        mSeekbar = (SeekBar) findViewById(R.id.seekBar);
        mPlayPause = (ImageView) findViewById(R.id.playpause);
        mLoading = (ProgressBar) findViewById(R.id.progressBar);
        mControllers = findViewById(R.id.controllers);
        mVideoView.setOnClickListener(mPlayPauseHandler);
    }

    /** Toggles between playing and paused when the video surface is tapped. */
    private final View.OnClickListener mPlayPauseHandler = new View.OnClickListener() {
        public void onClick(View v) {
            Log.d(TAG, "clicked play pause button");
            if (!mControllersVisible) {
                updateControllersVisibility(true);
            }
            if (mPlaybackState == PlaybackState.PAUSED) {
                mPlaybackState = PlaybackState.PLAYING;
                updatePlayButton(mPlaybackState);
                mVideoView.start();
                startControllersTimer();
            } else {
                mVideoView.pause();
                mPlaybackState = PlaybackState.PAUSED;
                updatePlayButton(PlaybackState.PAUSED);
                stopControllersTimer();
            }
        }
    };
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Set; import org.apache.geode.cache.CacheWriter; import org.apache.geode.cache.CacheWriterException; import org.apache.geode.cache.DataPolicy; import org.apache.geode.cache.EntryEvent; import org.apache.geode.cache.EntryNotFoundException; import org.apache.geode.cache.Operation; import org.apache.geode.cache.TimeoutException; import org.apache.geode.cache.TransactionId; import org.apache.geode.distributed.internal.DistributionManager; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.internal.ByteArrayDataInput; import org.apache.geode.internal.InternalStatisticsDisabledException; import org.apache.geode.internal.Version; import org.apache.geode.internal.cache.AbstractRegionMap.ARMLockTestHook; import org.apache.geode.internal.cache.InitialImageOperation.Entry; import org.apache.geode.internal.cache.entries.DiskEntry; import org.apache.geode.internal.cache.eviction.EvictableEntry; import org.apache.geode.internal.cache.eviction.EvictionController; import org.apache.geode.internal.cache.eviction.EvictionList; 
import org.apache.geode.internal.cache.persistence.DiskRegionView;
import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID;
import org.apache.geode.internal.cache.versions.RegionVersionVector;
import org.apache.geode.internal.cache.versions.VersionHolder;
import org.apache.geode.internal.cache.versions.VersionSource;
import org.apache.geode.internal.cache.versions.VersionStamp;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.offheap.annotations.Released;
import org.apache.geode.internal.util.concurrent.ConcurrentMapWithReusableEntries;

/**
 * Internal implementation of {@link RegionMap} for regions whose DataPolicy is proxy. Proxy maps
 * are always empty: reads return nothing, entry-creating operations are unsupported, and the
 * mutating operations below only distribute/record events without storing anything locally.
 *
 * @since GemFire 5.0
 */
class ProxyRegionMap implements RegionMap {

  protected ProxyRegionMap(LocalRegion owner, Attributes attr,
      InternalRegionArguments internalRegionArgs) {
    this.owner = owner;
    this.attr = attr;
  }

  /**
   * the region that owns this map
   */
  private final LocalRegion owner;

  private final Attributes attr;

  @Override
  public RegionEntryFactory getEntryFactory() {
    // proxy maps never create entries
    throw new UnsupportedOperationException();
  }

  @Override
  public Attributes getAttributes() {
    return this.attr;
  }

  @Override
  public void changeOwner(LocalRegion r) {
    throw new UnsupportedOperationException();
  }

  // The map is permanently empty, so size/containment queries are constants.

  @Override
  public int size() {
    return 0;
  }

  @Override
  public boolean isEmpty() {
    return true;
  }

  @Override
  public Set keySet() {
    return Collections.emptySet();
  }

  @Override
  public Collection<RegionEntry> regionEntries() {
    return Collections.emptySet();
  }

  @Override
  public Collection<RegionEntry> regionEntriesInVM() {
    return Collections.emptySet();
  }

  @Override
  public boolean containsKey(Object key) {
    return false;
  }

  @Override
  public RegionEntry getEntry(Object key) {
    return null;
  }

  @Override
  public RegionEntry putEntryIfAbsent(Object key, RegionEntry re) {
    return null;
  }

  @Override
  @SuppressWarnings({"rawtypes", "unchecked"})
  public Set<VersionSource> clear(RegionVersionVector rvv,
      BucketRegion bucketRegion) {
    // nothing needs to be done
    RegionVersionVector v = this.owner.getVersionVector();
    if (v != null) {
      return v.getDepartedMembersSet();
    } else {
      return Collections.emptySet();
    }
  }

  public void diskClear() {
    // nothing needs to be done
  }

  @Override
  public RegionEntry initRecoveredEntry(Object key, DiskEntry.RecoveredEntry value) {
    throw new UnsupportedOperationException();
  }

  @Override
  public RegionEntry updateRecoveredEntry(Object key, DiskEntry.RecoveredEntry value) {
    throw new UnsupportedOperationException();
  }

  /**
   * Used to modify an existing RegionEntry or create a new one when processing the values obtained
   * during a getInitialImage.
   */
  @Override
  public boolean initialImagePut(Object key, long lastModified, Object newValue,
      boolean wasRecovered, boolean deferLRUCallback, VersionTag entryVersion,
      InternalDistributedMember sender, boolean forceValue) {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean destroy(EntryEventImpl event, boolean inTokenMode, boolean duringRI,
      boolean cacheWrite, boolean isEviction, Object expectedOldValue,
      boolean removeRecoveredEntry)
      throws CacheWriterException, EntryNotFoundException, TimeoutException {
    // a local destroy cannot succeed since the proxy holds no entries
    if (event.getOperation().isLocal()) {
      throw new EntryNotFoundException(event.getKey().toString());
    }
    if (cacheWrite) {
      this.owner.cacheWriteBeforeDestroy(event, expectedOldValue);
    }
    owner.recordEvent(event);
    // markerEntry stands in for the (nonexistent) destroyed entry in the part2/part3 callbacks
    this.owner.basicDestroyPart2(markerEntry, event, inTokenMode,
        false /* Clear conflict occurred */, duringRI, true);
    this.owner.basicDestroyPart3(markerEntry, event, inTokenMode, duringRI, true,
        expectedOldValue);
    return true;
  }

  @Override
  public boolean invalidate(EntryEventImpl event, boolean invokeCallbacks, boolean forceNewEntry,
      boolean forceCallbacks) throws EntryNotFoundException {
    if (event.getOperation().isLocal()) {
      if (this.owner.isInitialized()) {
        AbstractRegionMap.forceInvalidateEvent(event, this.owner);
      }
      throw new EntryNotFoundException(event.getKey().toString());
    }
    this.owner.serverInvalidate(event);
    this.owner.recordEvent(event);
    this.owner.basicInvalidatePart2(markerEntry, event,
        false /* Clear conflict occurred */, true);
    this.owner.basicInvalidatePart3(markerEntry, event, true);
    return true;
  }

  public void evictEntry(Object key) {
    // noop
  }

  @Override
  public void evictValue(Object key) {
    // noop
  }

  /**
   * Used by basicPut to signal the caller that the put was successful.
   */
  private static final RegionEntry markerEntry = new ProxyRegionEntry();

  @Override
  public RegionEntry basicPut(EntryEventImpl event, long lastModified, boolean ifNew,
      boolean ifOld, Object expectedOldValue, boolean requireOldValue,
      boolean overwriteDestroyed)
      throws CacheWriterException, TimeoutException {
    if (!event.isOriginRemote() && event.getOperation() != Operation.REPLACE) { // bug 42167 - don't
                                                                                // convert replace
                                                                                // to CREATE
      event.makeCreate();
    }
    final CacheWriter cacheWriter = this.owner.basicGetWriter();
    // a cache-write is needed for locally-originated, distribution-eligible events
    final boolean cacheWrite = !event.isOriginRemote() && !event.isNetSearch()
        && !event.getInhibitDistribution() && event.isGenerateCallbacks()
        && (cacheWriter != null || this.owner.hasServerProxy()
            || this.owner.scope.isDistributed());
    if (cacheWrite) {
      final Set netWriteRecipients;
      if (cacheWriter == null && this.owner.scope.isDistributed()) {
        CacheDistributionAdvisor cda = ((DistributedRegion) this.owner).getDistributionAdvisor();
        netWriteRecipients = cda.adviseNetWrite();
      } else {
        netWriteRecipients = null;
      }
      if (event.getOperation() != Operation.REPLACE) { // bug #42167 - makeCreate() causes REPLACE
                                                       // to eventually become UPDATE
        event.makeCreate();
      }
      this.owner.cacheWriteBeforePut(event, netWriteRecipients, cacheWriter, requireOldValue,
          expectedOldValue);
    }
    owner.recordEvent(event);
    lastModified = // fix for bug 40129
        this.owner.basicPutPart2(event, markerEntry, true, lastModified,
            false /* Clear conflict occurred */);
    this.owner.basicPutPart3(event, markerEntry, true, lastModified, true, ifNew, ifOld,
        expectedOldValue, requireOldValue);
    return
markerEntry;
  }

  @Override
  public void writeSyncIfPresent(Object key, Runnable runner) {
    // nothing needed
  }

  @Override
  public void removeIfDestroyed(Object key) {
    // nothing needed
  }

  // Transaction-apply operations: no local entry state exists, so these only run the
  // part2 bookkeeping on the owner and queue callback events built around markerEntry.

  @Override
  public void txApplyDestroy(Object key, TransactionId rmtOrigin, TXRmtEvent event,
      boolean inTokenMode, boolean inRI, Operation op, EventID eventId, Object aCallbackArgument,
      List<EntryEventImpl> pendingCallbacks, FilterRoutingInfo filterRoutingInfo,
      ClientProxyMembershipID bridgeContext, boolean isOperationRemote, TXEntryState txEntryState,
      VersionTag versionTag, long tailKey) {
    this.owner.txApplyDestroyPart2(markerEntry, key, inTokenMode,
        false /* Clear conflict occurred */, false);
    if (!inTokenMode) {
      if (event != null) {
        event.addDestroy(this.owner, markerEntry, key, aCallbackArgument);
      }
      if (AbstractRegionMap.shouldInvokeCallbacks(this.owner, !inTokenMode)) {
        // fix for bug 39526
        @Released
        EntryEventImpl e = AbstractRegionMap.createCallbackEvent(this.owner, op, key, null,
            rmtOrigin, event, eventId, aCallbackArgument, filterRoutingInfo, bridgeContext,
            txEntryState, versionTag, tailKey);
        AbstractRegionMap.switchEventOwnerAndOriginRemote(e, txEntryState == null);
        pendingCallbacks.add(e);
      }
    }
  }

  @Override
  public void txApplyInvalidate(Object key, Object newValue, boolean didDestroy,
      TransactionId rmtOrigin, TXRmtEvent event, boolean localOp, EventID eventId,
      Object aCallbackArgument, List<EntryEventImpl> pendingCallbacks,
      FilterRoutingInfo filterRoutingInfo, ClientProxyMembershipID bridgeContext,
      TXEntryState txEntryState, VersionTag versionTag, long tailKey) {
    this.owner.txApplyInvalidatePart2(markerEntry, key, didDestroy, true);
    if (this.owner.isInitialized()) {
      if (event != null) {
        event.addInvalidate(this.owner, markerEntry, key, newValue, aCallbackArgument);
      }
      if (AbstractRegionMap.shouldInvokeCallbacks(this.owner, this.owner.isInitialized())) {
        // fix for bug 39526
        @Released
        EntryEventImpl e = AbstractRegionMap.createCallbackEvent(this.owner,
            localOp ? Operation.LOCAL_INVALIDATE : Operation.INVALIDATE, key, newValue, rmtOrigin,
            event, eventId, aCallbackArgument, filterRoutingInfo, bridgeContext, txEntryState,
            versionTag, tailKey);
        AbstractRegionMap.switchEventOwnerAndOriginRemote(e, txEntryState == null);
        pendingCallbacks.add(e);
      }
    }
  }

  @Override
  public void txApplyPut(Operation putOp, Object key, Object newValue, boolean didDestroy,
      TransactionId rmtOrigin, TXRmtEvent event, EventID eventId, Object aCallbackArgument,
      List<EntryEventImpl> pendingCallbacks, FilterRoutingInfo filterRoutingInfo,
      ClientProxyMembershipID bridgeContext, TXEntryState txEntryState, VersionTag versionTag,
      long tailKey) {
    // a put on a proxy always behaves like a create (there is no prior entry)
    Operation putOperation = putOp.getCorrespondingCreateOp();
    long lastMod = owner.cacheTimeMillis();
    this.owner.txApplyPutPart2(markerEntry, key, lastMod, true, didDestroy, false);
    if (this.owner.isInitialized()) {
      if (event != null) {
        event.addPut(putOperation, this.owner, markerEntry, key, newValue, aCallbackArgument);
      }
      if (AbstractRegionMap.shouldInvokeCallbacks(this.owner, this.owner.isInitialized())) {
        // fix for bug 39526
        @Released
        EntryEventImpl e = AbstractRegionMap.createCallbackEvent(this.owner, putOperation, key,
            newValue, rmtOrigin, event, eventId, aCallbackArgument, filterRoutingInfo,
            bridgeContext, txEntryState, versionTag, tailKey);
        AbstractRegionMap.switchEventOwnerAndOriginRemote(e, txEntryState == null);
        pendingCallbacks.add(e);
      }
    }
  }

  // LRUMapCallbacks methods — no entries means no LRU work to do.

  @Override
  public void lruUpdateCallback() {
    // nothing needed
  }

  @Override
  public boolean disableLruUpdateCallback() {
    // nothing needed
    return false;
  }

  @Override
  public void enableLruUpdateCallback() {
    // nothing needed
  }

  @Override
  public void decTxRefCount(RegionEntry e) {
    // nothing needed
  }

  @Override
  public boolean lruLimitExceeded(DiskRegionView diskRegionView) {
    return false;
  }

  @Override
  public void lruCloseStats() {
    // nothing needed
  }

  @Override
  public void resetThreadLocals() {
    // nothing needed
  }

  @Override
  public void removeEntry(Object key,
      RegionEntry value, boolean updateStats) {
    // nothing to do
  }

  @Override
  public void removeEntry(Object key, RegionEntry re, boolean updateStat, EntryEventImpl event,
      InternalRegion owner) {
    // nothing to do
  }

  /**
   * Provides a dummy implementation of RegionEntry so that basicPut can return an instance that
   * make the upper levels think it did the put.
   */
  public static class ProxyRegionEntry implements RegionEntry {

    @Override
    public long getLastModified() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public long getLastAccessed() throws InternalStatisticsDisabledException {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public long getHitCount() throws InternalStatisticsDisabledException {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public long getMissCount() throws InternalStatisticsDisabledException {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public VersionStamp getVersionStamp() {
      return null;
    }

    @Override
    public boolean isTombstone() {
      return false;
    }

    @Override
    public VersionTag generateVersionTag(VersionSource member, boolean withDelta,
        InternalRegion region, EntryEventImpl event) {
      return null; // proxies don't do versioning
    }

    public void processVersionTag(EntryEvent ev) {
      return;
    }

    @Override
    public void makeTombstone(InternalRegion region, VersionTag version) {
      return;
    }

    @Override
    public void updateStatsForPut(long lastModifiedTime, long lastAccessedTime) {
      // do nothing; called by LocalRegion.updateStatsForPut
    }

    @Override
    public void setRecentlyUsed(RegionEntryContext context) {
      // do nothing; called by LocalRegion.updateStatsForPut
    }

    @Override
    public void updateStatsForGet(boolean hit, long time)
    {
      // do nothing; no entry stats
    }

    // Every remaining operation below either throws (no real entry exists on a proxy
    // region) or returns a harmless constant where callers can tolerate one.

    @Override
    public void txDidDestroy(long currentTime) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public void resetCounts() throws InternalStatisticsDisabledException {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public void removePhase1(InternalRegion region, boolean clear) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public void removePhase2() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isRemoved() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isRemovedPhase2() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean fillInValue(InternalRegion region, Entry entry, ByteArrayDataInput in,
        DistributionManager distributionManager, final Version version) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isOverflowedToDisk(InternalRegion region,
        DistributedRegion.DiskPosition diskPosition) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Object getKey() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Object getValue(RegionEntryContext context) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s",
              DataPolicy.EMPTY));
    }

    @Override
    public Object getValueRetain(RegionEntryContext context) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public void setValue(RegionEntryContext context, Object value) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Object prepareValueForCache(RegionEntryContext context, Object value,
        boolean isEntryUpdate) {
      throw new IllegalStateException("Should never be called");
    }

    @Override
    public Object getValue() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Token getValueAsToken() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Object getValueRetain(RegionEntryContext context, boolean decompress) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Object getTransformedValue() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Object getValueInVM(RegionEntryContext context) {
      return null; // called by TXRmtEvent.createEvent
    }

    @Override
    public Object getValueOnDisk(InternalRegion region) throws EntryNotFoundException {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Object getValueOnDiskOrBuffer(InternalRegion region) throws EntryNotFoundException {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Object getSerializedValueOnDisk(InternalRegion region) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean initialImagePut(InternalRegion region, long lastModified, Object newValue,
        boolean wasRecovered, boolean acceptedVersionTag) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean initialImageInit(InternalRegion region, long lastModified, Object newValue,
        boolean create, boolean wasRecovered, boolean acceptedVersionTag) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean destroy(InternalRegion region, EntryEventImpl event, boolean inTokenMode,
        boolean cacheWrite, Object expectedOldValue, boolean forceDestroy,
        boolean removeRecoveredEntry)
        throws CacheWriterException, EntryNotFoundException, TimeoutException {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean getValueWasResultOfSearch() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public void setValueResultOfSearch(boolean value) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean dispatchListenerEvents(EntryEventImpl event) throws InterruptedException {
      // note that we don't synchronize on the RE before dispatching events
      event.invokeCallbacks(event.getRegion(), event.inhibitCacheListenerNotification(), false);
      return true;
    }

    @Override
    public boolean hasStats() {
      return false;
    }

    @Override
    public Object getValueInVMOrDiskWithoutFaultIn(InternalRegion region) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public Object getValueOffHeapOrDiskWithoutFaultIn(InternalRegion region) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isUpdateInProgress() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public void setUpdateInProgress(boolean underUpdate) {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isValueNull() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isInvalid() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isDestroyed() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public void setValueToNull() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isInvalidOrRemoved() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isDestroyedOrRemoved() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public boolean isDestroyedOrRemovedButNotTombstone() {
      throw new UnsupportedOperationException(
          String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY));
    }

    @Override
    public void returnToPool() {
      // nothing
    }

    @Override
    public void setValueWithTombstoneCheck(Object value, EntryEvent event)
        throws RegionClearedException {
      throw new UnsupportedOperationException(
String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY)); } @Override public boolean isCacheListenerInvocationInProgress() { return false; } @Override public void setCacheListenerInvocationInProgress(boolean isListenerInvoked) { // nothing } @Override public void setValue(RegionEntryContext context, Object value, EntryEventImpl event) throws RegionClearedException { throw new UnsupportedOperationException( String.format("No entry support on regions with DataPolicy %s", DataPolicy.EMPTY)); } @Override public boolean isInUseByTransaction() { return false; } @Override public void incRefCount() { // nothing } @Override public void decRefCount(EvictionList lruList, InternalRegion region) { // nothing } @Override public void resetRefCount(EvictionList lruList) { // nothing } @Override public Object prepareValueForCache(RegionEntryContext context, Object value, EntryEventImpl event, boolean isEntryUpdate) { throw new IllegalStateException("Should never be called"); } @Override public boolean isEvicted() { return false; } } @Override public void lruEntryFaultIn(EvictableEntry entry) { // do nothing. } @Override public void copyRecoveredEntries(RegionMap rm) { throw new IllegalStateException("copyRecoveredEntries should never be called on proxy"); } @Override public boolean removeTombstone(RegionEntry re, VersionHolder destroyedVersion, boolean isEviction, boolean isScheduledTombstone) { throw new IllegalStateException("removeTombstone should never be called on a proxy"); } @Override public boolean isTombstoneNotNeeded(RegionEntry re, int destroyedVersion) { throw new IllegalStateException("removeTombstone should never be called on a proxy"); } @Override public void setEntryFactory(RegionEntryFactory f) { throw new IllegalStateException("Should not be called on a ProxyRegionMap"); } @Override public void updateEntryVersion(EntryEventImpl event) { // Do nothing. Not applicable for clients. 
} @Override public RegionEntry getEntryInVM(Object key) { return null; } @Override public RegionEntry getOperationalEntryInVM(Object key) { return null; } @Override public int sizeInVM() { return 0; } @Override public void close(BucketRegion bucketRegion) { // nothing } @Override public ARMLockTestHook getARMLockTestHook() { return null; } @Override public long getEvictions() { return 0; } @Override public void incRecentlyUsed() { // nothing } @Override public EvictionController getEvictionController() { return null; } @Override public int getEntryOverhead() { return 0; } @Override public boolean beginChangeValueForm(EvictableEntry le, CachedDeserializable vmCachedDeserializable, Object v) { return false; } @Override public void finishChangeValueForm() {} @Override public int centralizedLruUpdateCallback() { return 0; } @Override public void updateEvictionCounter() {} @Override public ConcurrentMapWithReusableEntries<Object, Object> getCustomEntryConcurrentHashMap() { return null; } @Override public void setEntryMap(ConcurrentMapWithReusableEntries<Object, Object> map) { } }
/*
 * Copyright 2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.spockframework.runtime.model;

import java.util.*;

import org.spockframework.runtime.*;
import org.spockframework.runtime.extension.IMethodInterceptor;
import org.spockframework.util.*;

/**
 * Runtime information about a Spock specification.
 *
 * <p>A specification may sit in an inheritance chain; {@code superSpec}/{@code subSpec}
 * link the chain, and the {@code getAll*} methods aggregate members over it
 * (super spec members first).
 *
 * @author Peter Niederwieser
 */
public class SpecInfo extends SpecElementInfo<NodeInfo, Class<?>> implements IMethodNameMapper {
  private final List<FieldInfo> fields = new ArrayList<FieldInfo>();

  private final List<IMethodInterceptor> setupInterceptors = new ArrayList<IMethodInterceptor>();
  private final List<IMethodInterceptor> cleanupInterceptors = new ArrayList<IMethodInterceptor>();
  private final List<IMethodInterceptor> setupSpecInterceptors = new ArrayList<IMethodInterceptor>();
  private final List<IMethodInterceptor> cleanupSpecInterceptors = new ArrayList<IMethodInterceptor>();
  private final List<IMethodInterceptor> sharedInitializerInterceptors = new ArrayList<IMethodInterceptor>();
  private final List<IMethodInterceptor> initializerInterceptors = new ArrayList<IMethodInterceptor>();

  private final List<IRunListener> listeners = new ArrayList<IRunListener>();

  private String pkg;
  private String filename;
  private String narrative;

  private SpecInfo superSpec;
  private SpecInfo subSpec;
  // Lazily computed caches of the inheritance chain, in both directions.
  private List<SpecInfo> specsTopToBottom;
  private List<SpecInfo> specsBottomToTop;

  private MethodInfo initializerMethod;
  private MethodInfo sharedInitializerMethod;

  private final List<MethodInfo> setupMethods = new ArrayList<MethodInfo>();
  private final List<MethodInfo> cleanupMethods = new ArrayList<MethodInfo>();
  private final List<MethodInfo> setupSpecMethods = new ArrayList<MethodInfo>();
  private final List<MethodInfo> cleanupSpecMethods = new ArrayList<MethodInfo>();

  private final List<FeatureInfo> features = new ArrayList<FeatureInfo>();

  public String getPackage() {
    return pkg;
  }

  public void setPackage(String pkg) {
    this.pkg = pkg;
  }

  public String getFilename() {
    return filename;
  }

  public void setFilename(String filename) {
    this.filename = filename;
  }

  public String getNarrative() {
    return narrative;
  }

  public void setNarrative(String narrative) {
    this.narrative = narrative;
  }

  public SpecInfo getSuperSpec() {
    return superSpec;
  }

  public void setSuperSpec(SpecInfo superSpec) {
    this.superSpec = superSpec;
  }

  public SpecInfo getSubSpec() {
    return subSpec;
  }

  public void setSubSpec(SpecInfo subSpec) {
    this.subSpec = subSpec;
  }

  /** Returns the topmost spec in this spec's inheritance chain. */
  public SpecInfo getTopSpec() {
    SpecInfo curr = this;
    while (curr.getSuperSpec() != null)
      curr = curr.getSuperSpec();
    return curr;
  }

  public boolean getIsTopSpec() {
    return superSpec == null;
  }

  /** Returns the bottommost (most derived) spec in this spec's inheritance chain. */
  public SpecInfo getBottomSpec() {
    SpecInfo curr = this;
    while (curr.getSubSpec() != null)
      curr = curr.getSubSpec();
    return curr;
  }

  public boolean getIsBottomSpec() {
    return subSpec == null;
  }

  /** Returns the inheritance chain from top spec to bottom spec (cached after first call). */
  public List<SpecInfo> getSpecsTopToBottom() {
    if (specsTopToBottom == null) {
      specsTopToBottom = new ArrayList<SpecInfo>();
      SpecInfo curr = getTopSpec();
      while (curr != null) {
        specsTopToBottom.add(curr);
        curr = curr.getSubSpec();
      }
    }
    return specsTopToBottom;
  }

  /** Returns the inheritance chain from bottom spec to top spec (cached after first call). */
  public List<SpecInfo> getSpecsBottomToTop() {
    if (specsBottomToTop == null) {
      specsBottomToTop = new ArrayList<SpecInfo>();
      SpecInfo curr = getBottomSpec();
      while (curr != null) {
        specsBottomToTop.add(curr);
        curr = curr.getSuperSpec();
      }
    }
    return specsBottomToTop;
  }

  public MethodInfo getInitializerMethod() {
    return initializerMethod;
  }

  public void setInitializerMethod(MethodInfo initializerMethod) {
    this.initializerMethod = initializerMethod;
  }

  public MethodInfo getSharedInitializerMethod() {
    return sharedInitializerMethod;
  }

  public void setSharedInitializerMethod(MethodInfo sharedInitializerMethod) {
    this.sharedInitializerMethod = sharedInitializerMethod;
  }

  public List<MethodInfo> getSetupMethods() {
    return setupMethods;
  }

  public void addSetupMethod(MethodInfo setupMethod) {
    setupMethods.add(setupMethod);
  }

  public List<MethodInfo> getCleanupMethods() {
    return cleanupMethods;
  }

  public void addCleanupMethod(MethodInfo cleanupMethod) {
    cleanupMethods.add(cleanupMethod);
  }

  public List<MethodInfo> getSetupSpecMethods() {
    return setupSpecMethods;
  }

  public void addSetupSpecMethod(MethodInfo setupSpecMethod) {
    setupSpecMethods.add(setupSpecMethod);
  }

  public List<MethodInfo> getCleanupSpecMethods() {
    return cleanupSpecMethods;
  }

  public void addCleanupSpecMethod(MethodInfo cleanupSpecMethod) {
    cleanupSpecMethods.add(cleanupSpecMethod);
  }

  /** Returns this spec's fixture methods in invocation order: setupSpec, setup, cleanup, cleanupSpec. */
  @SuppressWarnings("unchecked")
  public Iterable<MethodInfo> getFixtureMethods() {
    return CollectionUtil.concat(setupSpecMethods, setupMethods, cleanupMethods, cleanupSpecMethods);
  }

  /** Returns fixture methods of this spec and all super specs (super spec methods first). */
  @SuppressWarnings("unchecked")
  public Iterable<MethodInfo> getAllFixtureMethods() {
    if (superSpec == null) return getFixtureMethods();
    return CollectionUtil.concat(superSpec.getAllFixtureMethods(), getFixtureMethods());
  }

  public List<FieldInfo> getFields() {
    return fields;
  }

  /** Returns fields of this spec and all super specs (super spec fields first). */
  public List<FieldInfo> getAllFields() {
    if (superSpec == null) return fields;

    List<FieldInfo> result = new ArrayList<FieldInfo>(superSpec.getAllFields());
    result.addAll(fields);
    return result;
  }

  public void addField(FieldInfo field) {
    fields.add(field);
  }

  public List<FeatureInfo> getFeatures() {
    return features;
  }

  /**
   * Returns features of this spec and all super specs (super spec features first).
   * NOTE: when this spec has no super spec, the internal feature list itself is
   * returned; callers must not mutate it.
   */
  public List<FeatureInfo> getAllFeatures() {
    if (superSpec == null) return features;

    List<FeatureInfo> result = new ArrayList<FeatureInfo>(superSpec.getAllFeatures());
    result.addAll(features);
    return result;
  }

  /** Returns all features (including inherited ones), sorted by execution order. */
  public List<FeatureInfo> getAllFeaturesInExecutionOrder() {
    // Defensive copy: getAllFeatures() aliases the internal feature list when
    // superSpec == null, so sorting its result directly would reorder spec state.
    List<FeatureInfo> result = new ArrayList<FeatureInfo>(getAllFeatures());
    Collections.sort(result, new Comparator<FeatureInfo>() {
      public int compare(FeatureInfo f1, FeatureInfo f2) {
        // Explicit comparison instead of subtraction, which can overflow.
        int order1 = f1.getExecutionOrder();
        int order2 = f2.getExecutionOrder();
        return order1 < order2 ? -1 : (order1 == order2 ? 0 : 1);
      }
    });
    return result;
  }

  public void addFeature(FeatureInfo feature) {
    features.add(feature);
  }

  public List<IMethodInterceptor> getSetupInterceptors() {
    return setupInterceptors;
  }

  public void addSetupInterceptor(IMethodInterceptor interceptor) {
    setupInterceptors.add(interceptor);
  }

  public List<IMethodInterceptor> getCleanupInterceptors() {
    return cleanupInterceptors;
  }

  public void addCleanupInterceptor(IMethodInterceptor interceptor) {
    cleanupInterceptors.add(interceptor);
  }

  public List<IMethodInterceptor> getSetupSpecInterceptors() {
    return setupSpecInterceptors;
  }

  public void addSetupSpecInterceptor(IMethodInterceptor interceptor) {
    setupSpecInterceptors.add(interceptor);
  }

  public List<IMethodInterceptor> getCleanupSpecInterceptors() {
    return cleanupSpecInterceptors;
  }

  public void addCleanupSpecInterceptor(IMethodInterceptor interceptor) {
    cleanupSpecInterceptors.add(interceptor);
  }

  public List<IMethodInterceptor> getSharedInitializerInterceptors() {
    return sharedInitializerInterceptors;
  }

  public void addSharedInitializerInterceptor(IMethodInterceptor interceptor) {
    sharedInitializerInterceptors.add(interceptor);
  }

  public List<IMethodInterceptor> getInitializerInterceptors() {
    return initializerInterceptors;
  }

  public void addInitializerInterceptor(IMethodInterceptor interceptor) {
    initializerInterceptors.add(interceptor);
  }

  public List<IRunListener> getListeners() {
    return listeners;
  }

  public void addListener(IRunListener listener) {
    listeners.add(listener);
  }

  /** Marks every feature (including inherited ones) not matched by the filter as excluded. */
  public void filterFeatures(final IFeatureFilter filter) {
    for (FeatureInfo feature: getAllFeatures()) {
      if (!filter.matches(feature))
        feature.setExcluded(true);
    }
  }

  /** Sorts all features by the given order and assigns each its resulting execution order index. */
  public void sortFeatures(final IFeatureSortOrder order) {
    List<FeatureInfo> features = getAllFeatures();
    Collections.sort(features, order);
    for (int i = 0; i < features.size(); i++)
      features.get(i).setExecutionOrder(i);
  }

  /**
   * Tells whether the given class/method pair names an initializer or fixture
   * method of a spec in this spec's inheritance chain.
   */
  public boolean isInitializerOrFixtureMethod(String className, String methodName) {
    if (!InternalIdentifiers.INITIALIZER_AND_FIXTURE_METHODS.contains(methodName))
      return false;

    for (SpecInfo spec : getSpecsBottomToTop())
      if (spec.getReflection().getName().equals(className))
        return true;

    return false;
  }

  /**
   * Maps a bytecode method name back to the feature name it was compiled from;
   * returns the method name unchanged if no feature matches.
   */
  public String toFeatureName(String methodName) {
    for (FeatureInfo feature : getAllFeatures())
      if (feature.hasBytecodeName(methodName))
        return feature.getName();
    return methodName;
  }
}
package de.danoeh.antennapod.core.service.download;

import android.database.Cursor;

import org.apache.commons.lang3.Validate;

import java.util.Date;

import de.danoeh.antennapod.core.feed.FeedFile;
import de.danoeh.antennapod.core.storage.PodDBAdapter;
import de.danoeh.antennapod.core.util.DownloadError;

/** Contains status attributes for one download */
public class DownloadStatus {

    /**
     * Downloaders should use this constant for the size attribute if necessary
     * so that the listadapters etc. can react properly.
     */
    public static final int SIZE_UNKNOWN = -1;

    // ----------------------------------- ATTRIBUTES STORED IN DB

    /** Unique id for storing the object in database. */
    protected long id;

    /**
     * A human-readable string which is shown to the user so that he can
     * identify the download. Should be the title of the item/feed/media or the
     * URL if the download has no other title.
     */
    protected String title;

    /** Error code describing why the download ended (or SUCCESS). */
    protected DownloadError reason;

    /**
     * A message which can be presented to the user to give more information.
     * Should be null if Download was successful.
     */
    protected String reasonDetailed;

    /** Whether the download completed without error. */
    protected boolean successful;

    /** Time at which the download finished. */
    protected Date completionDate;

    /** Id of the feedfile this status belongs to. */
    protected long feedfileId;

    /**
     * Is used to determine the type of the feedfile even if the feedfile does
     * not exist anymore. The value should be FEEDFILETYPE_FEED,
     * FEEDFILETYPE_FEEDIMAGE or FEEDFILETYPE_FEEDMEDIA
     */
    protected int feedfileType;

    // ------------------------------------ NOT STORED IN DB

    /** True once the download has terminated (successfully or not). */
    protected boolean done;

    /** True if the download was aborted by the user. */
    protected boolean cancelled;

    /** Constructor for restoring Download status entries from DB. */
    public DownloadStatus(long id, String title, long feedfileId,
                          int feedfileType, boolean successful, DownloadError reason,
                          Date completionDate, String reasonDetailed) {
        this.id = id;
        this.title = title;
        this.feedfileId = feedfileId;
        this.feedfileType = feedfileType;
        this.reason = reason;
        this.reasonDetailed = reasonDetailed;
        this.successful = successful;
        // defensive copy: Date is mutable
        this.completionDate = (Date) completionDate.clone();
        this.done = true;
    }

    /** Constructor for statuses derived from an in-flight download request. */
    public DownloadStatus(DownloadRequest request, DownloadError reason,
                          boolean successful, boolean cancelled, String reasonDetailed) {
        Validate.notNull(request);

        this.title = request.getTitle();
        this.feedfileId = request.getFeedfileId();
        this.feedfileType = request.getFeedfileType();
        this.reason = reason;
        this.reasonDetailed = reasonDetailed;
        this.successful = successful;
        this.cancelled = cancelled;
        this.completionDate = new Date();
    }

    /** Constructor for creating new completed downloads. */
    public DownloadStatus(FeedFile feedfile, String title, DownloadError reason,
                          boolean successful, String reasonDetailed) {
        Validate.notNull(feedfile);

        this.title = title;
        this.feedfileId = feedfile.getId();
        this.feedfileType = feedfile.getTypeAsInt();
        this.reason = reason;
        this.reasonDetailed = reasonDetailed;
        this.successful = successful;
        this.completionDate = new Date();
        this.done = true;
    }

    /** Constructor for creating new completed downloads. */
    public DownloadStatus(long feedfileId, int feedfileType, String title,
                          DownloadError reason, boolean successful, String reasonDetailed) {
        this.title = title;
        this.feedfileId = feedfileId;
        this.feedfileType = feedfileType;
        this.reason = reason;
        this.reasonDetailed = reasonDetailed;
        this.successful = successful;
        this.completionDate = new Date();
        this.done = true;
    }

    /** Restores a DownloadStatus from a database cursor positioned on a row. */
    public static DownloadStatus fromCursor(Cursor cursor) {
        final long id =
                cursor.getLong(cursor.getColumnIndex(PodDBAdapter.KEY_ID));
        final String title =
                cursor.getString(cursor.getColumnIndex(PodDBAdapter.KEY_DOWNLOADSTATUS_TITLE));
        final long feedfileId =
                cursor.getLong(cursor.getColumnIndex(PodDBAdapter.KEY_FEEDFILE));
        final int feedfileType =
                cursor.getInt(cursor.getColumnIndex(PodDBAdapter.KEY_FEEDFILETYPE));
        final boolean successful =
                cursor.getInt(cursor.getColumnIndex(PodDBAdapter.KEY_SUCCESSFUL)) > 0;
        final int reasonCode =
                cursor.getInt(cursor.getColumnIndex(PodDBAdapter.KEY_REASON));
        final Date completionDate =
                new Date(cursor.getLong(cursor.getColumnIndex(PodDBAdapter.KEY_COMPLETION_DATE)));
        final String reasonDetailed =
                cursor.getString(cursor.getColumnIndex(PodDBAdapter.KEY_REASON_DETAILED));

        return new DownloadStatus(id, title, feedfileId, feedfileType, successful,
                DownloadError.fromCode(reasonCode), completionDate, reasonDetailed);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("DownloadStatus [id=").append(id)
                .append(", title=").append(title)
                .append(", reason=").append(reason)
                .append(", reasonDetailed=").append(reasonDetailed)
                .append(", successful=").append(successful)
                .append(", completionDate=").append(completionDate)
                .append(", feedfileId=").append(feedfileId)
                .append(", feedfileType=").append(feedfileType)
                .append(", done=").append(done)
                .append(", cancelled=").append(cancelled)
                .append("]");
        return sb.toString();
    }

    public long getId() {
        return id;
    }

    public String getTitle() {
        return title;
    }

    public DownloadError getReason() {
        return reason;
    }

    public String getReasonDetailed() {
        return reasonDetailed;
    }

    public boolean isSuccessful() {
        return successful;
    }

    public Date getCompletionDate() {
        // defensive copy: Date is mutable
        return (Date) completionDate.clone();
    }

    public long getFeedfileId() {
        return feedfileId;
    }

    public int getFeedfileType() {
        return feedfileType;
    }

    public boolean isDone() {
        return done;
    }

    public boolean isCancelled() {
        return cancelled;
    }

    /** Marks the download as finished successfully. */
    public void setSuccessful() {
        this.successful = true;
        this.reason = DownloadError.SUCCESS;
        this.done = true;
    }

    /** Marks the download as finished with the given error. */
    public void setFailed(DownloadError reason, String reasonDetailed) {
        this.successful = false;
        this.reason = reason;
        this.reasonDetailed = reasonDetailed;
        this.done = true;
    }

    /** Marks the download as aborted by the user. */
    public void setCancelled() {
        this.successful = false;
        this.reason = DownloadError.ERROR_DOWNLOAD_CANCELLED;
        this.done = true;
        this.cancelled = true;
    }

    public void setCompletionDate(Date completionDate) {
        // defensive copy: Date is mutable
        this.completionDate = (Date) completionDate.clone();
    }

    public void setId(long id) {
        this.id = id;
    }
}
package tsg.incremental.old;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.ListIterator;

import tsg.Label;
import tsg.TSNodeLabel;
import tsg.TermLabel;

/**
 * The fringe of a tree fragment: its root label plus the left-to-right
 * sequence of terminal items. The first two terminals are stored in
 * dedicated fields (the second may be null when the fragment has a single
 * terminal); any remaining terminals live in {@code otherTerminals}.
 */
public class Fringe {

	// Root label of the fragment this fringe was computed from.
	TermLabel rootLabel;
	// First terminal item (never null for a well-formed fringe).
	TermLabel firstTerminalLabel;
	// Second terminal item, or null if there is only one terminal.
	TermLabel secondTerminalLabel;
	// Terminals after the second, in left-to-right order.
	LinkedList<TermLabel> otherTerminals = new LinkedList<TermLabel>();

	/**
	 * General Fringe
	 * @param rootLabel root of the fragment
	 * @param firstTerminalNode first terminal item
	 * @param secondTerminalNode second terminal item (may be null)
	 * @param otherTerminals remaining terminal items (not copied; caller retains ownership)
	 */
	public Fringe(TermLabel rootLabel, TermLabel firstTerminalNode,
			TermLabel secondTerminalNode, LinkedList<TermLabel> otherTerminals) {
		this.rootLabel = rootLabel;
		this.firstTerminalLabel = firstTerminalNode;
		this.secondTerminalLabel = secondTerminalNode;
		this.otherTerminals = otherTerminals;
	}

	/**
	 * Empty Fringe (single terminal, no remainder)
	 * @param rootLabel root of the fragment
	 * @param firstTerminalNode the only terminal item
	 */
	public Fringe(TermLabel rootLabel, TermLabel firstTerminalNode) {
		this.rootLabel = rootLabel;
		this.firstTerminalLabel = firstTerminalNode;
	}

	/**
	 * Computes the fringe of the given fragment: root label plus all terminal
	 * items in left-to-right order.
	 */
	public static Fringe computeFringe(TSNodeLabel fragment) {
		TermLabel rootLabel = TermLabel.getTermLabel(fragment);
		ArrayList<TSNodeLabel> terms = fragment.collectTerminalItems();
		LinkedList<TermLabel> otherTerminals = new LinkedList<TermLabel>();
		for(TSNodeLabel t : terms) {
			otherTerminals.add(TermLabel.getTermLabel(t.label, t.isLexical));
		}
		TermLabel firstTerminalNode = otherTerminals.removeFirst();
		TermLabel secondTerminalNode = otherTerminals.isEmpty() ? null : otherTerminals.removeFirst();
		return new Fringe(rootLabel, firstTerminalNode, secondTerminalNode, otherTerminals);
	}

	/**
	 * Same as {@link #computeFringe(TSNodeLabel)}, accumulating elapsed wall-clock
	 * milliseconds into {@code time[0]}.
	 */
	public static Fringe computeFringeTime(TSNodeLabel fragment, long[] time) {
		long start = System.currentTimeMillis();
		Fringe fringe = computeFringe(fragment);
		long stop = System.currentTimeMillis();
		time[0] += (stop-start);
		return fringe;
	}

	/** Total number of terminal items in this fringe. */
	public int size() {
		int i = this.secondTerminalLabel==null ? 1 : 2;
		return otherTerminals.size() + i;
	}

	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		sb.append(rootLabel.toString());
		sb.append(": [");
		sb.append(firstTerminalLabel);
		sb.append("|");
		sb.append(secondTerminalLabel==null ? "#null#" : secondTerminalLabel);
		sb.append("|");
		Iterator<TermLabel> iter = otherTerminals.iterator();
		while(iter.hasNext()) {
			sb.append(iter.next());
			if (iter.hasNext())
				sb.append(",");
		}
		sb.append("]");
		return sb.toString();
	}

	/** True if the first terminal is lexical. */
	public boolean isFirstLexFringe() {
		return firstTerminalLabel.isLexical;
	}

	// firstLexSecondLex
	/** True if the first two terminals are both lexical (a scan is possible). */
	public boolean isScanFringe() {
		return firstTerminalLabel.isLexical &&
			secondTerminalLabel!=null && secondTerminalLabel.isLexical;
	}

	/** True if the first terminal is lexical and the second is a substitution site. */
	public boolean isFirstLexNextSubFringe() {
		return firstTerminalLabel.isLexical &&
			secondTerminalLabel!=null && !secondTerminalLabel.isLexical;
	}

	/** True if the fringe is exhausted: a single lexical terminal remains. */
	public boolean isEmpty() {
		return firstTerminalLabel.isLexical && secondTerminalLabel==null;
	}

	/** True if the first terminal is a substitution site and the second is lexical. */
	public boolean isFirstSubNextLexFringe() {
		return !firstTerminalLabel.isLexical &&
			secondTerminalLabel!=null && secondTerminalLabel.isLexical;
	}

	// @Override added for consistency with toString()/hashCode(); it guards
	// against signature typos silently overloading instead of overriding.
	@Override
	public boolean equals(Object o) {
		if (o==this)
			return true;
		if (o instanceof Fringe) {
			Fringe f = (Fringe)o;
			boolean thisSeconIsNull = this.secondTerminalLabel==null;
			boolean fSeconIsNull = f.secondTerminalLabel==null;
			if (thisSeconIsNull != fSeconIsNull)
				return false;
			return this.rootLabel.equals(f.rootLabel) &&
				this.firstTerminalLabel.equals(f.firstTerminalLabel) &&
				(thisSeconIsNull || this.secondTerminalLabel.equals(f.secondTerminalLabel)) &&
				this.otherTerminals.equals(f.otherTerminals);
		}
		return false;
	}

	@Override
	public int hashCode() {
		int result = 31 + rootLabel.hashCode();
		result = 31 * result + firstTerminalLabel.hashCode();
		// 1 stands in for the hash of an absent second terminal
		result = 31 * result + (secondTerminalLabel==null ? 1 : secondTerminalLabel.hashCode());
		result = 31 * result + otherTerminals.hashCode();
		return result;
	}

	/**
	 * True if {@code nextFringe} can be substituted down into this fringe:
	 * this fringe's second terminal is a substitution site matching
	 * {@code nextFringe}'s root.
	 */
	public boolean checkSubDown(Fringe nextFringe) {
		return this.isFirstLexNextSubFringe() &&
			nextFringe.isFirstLexFringe() &&
			this.secondTerminalLabel.equals(nextFringe.rootLabel);
	}

	/** Timed wrapper around {@link #subDown(Fringe)}; adds elapsed ms to {@code time[0]}. */
	public Fringe subDownTime(Fringe nextFringe, long[] time) {
		long start = System.currentTimeMillis();
		Fringe f = this.subDown(nextFringe);
		long stop = System.currentTimeMillis();
		time[0] += (stop-start);
		return f;
	}

	/**
	 * Substitutes {@code nextFringe} down into this fringe's second terminal:
	 * the result keeps this root, starts at {@code nextFringe}'s first terminal,
	 * and is followed by {@code nextFringe}'s remaining terminals and then this
	 * fringe's remaining terminals.
	 */
	public Fringe subDown(Fringe nextFringe) {
		// order matters: next fringe's terminals come before this fringe's remainder
		LinkedList<TermLabel> newOtherTerminals = new LinkedList<TermLabel>();
		if (nextFringe.secondTerminalLabel!=null)
			newOtherTerminals.add(nextFringe.secondTerminalLabel);
		newOtherTerminals.addAll(nextFringe.otherTerminals);
		newOtherTerminals.addAll(this.otherTerminals);
		TermLabel newRootLabel = this.rootLabel;
		TermLabel newFirstTerminalNode = nextFringe.firstTerminalLabel;
		TermLabel newSecondTerminalNode = newOtherTerminals.isEmpty() ? null : newOtherTerminals.removeFirst();
		return new Fringe(newRootLabel, newFirstTerminalNode, newSecondTerminalNode, newOtherTerminals);
	}

	/**
	 * True if this (exhausted) fringe can be substituted up into
	 * {@code nextFringe}: this root matches {@code nextFringe}'s first terminal,
	 * which is a substitution site followed by a lexical terminal.
	 */
	public boolean checkSubUp(Fringe nextFringe) {
		return this.isEmpty() &&
			nextFringe.isFirstSubNextLexFringe() &&
			this.rootLabel.equals(nextFringe.firstTerminalLabel);
	}

	/** Timed wrapper around {@link #subUp(Fringe)}; adds elapsed ms to {@code time[0]}. */
	public Fringe subUpTime(Fringe nextFringe, long[] time) {
		long start = System.currentTimeMillis();
		Fringe f = this.subUp(nextFringe);
		long stop = System.currentTimeMillis();
		time[0] += (stop-start);
		return f;
	}

	/**
	 * Substitutes this fringe up into {@code nextFringe}: the result takes
	 * {@code nextFringe}'s root and continues from its second terminal onward.
	 */
	public Fringe subUp(Fringe nextFringe) {
		// copy-constructor replaces the original new-then-addAll pair
		LinkedList<TermLabel> newOtherTerminals = new LinkedList<TermLabel>(nextFringe.otherTerminals);
		TermLabel newRootLabel = nextFringe.rootLabel;
		TermLabel newFirstTerminalNode = nextFringe.secondTerminalLabel;
		TermLabel newSecondTerminalNode = newOtherTerminals.isEmpty() ? null : newOtherTerminals.removeFirst();
		return new Fringe(newRootLabel, newFirstTerminalNode, newSecondTerminalNode, newOtherTerminals);
	}

	/** Timed wrapper around {@link #scan()}; adds elapsed ms to {@code time[0]}. */
	public Fringe scanTime(long[] time) {
		long start = System.currentTimeMillis();
		Fringe f = this.scan();
		long stop = System.currentTimeMillis();
		time[0] += (stop-start);
		return f;
	}

	/**
	 * Consumes the first terminal: the result keeps this root and starts at the
	 * second terminal, shifting one item out of {@code otherTerminals}.
	 */
	public Fringe scan() {
		// copy-constructor replaces the original new-then-addAll pair
		LinkedList<TermLabel> newOtherTerminals = new LinkedList<TermLabel>(this.otherTerminals);
		TermLabel newRootLabel = this.rootLabel;
		TermLabel newFirstTerminalNode = this.secondTerminalLabel;
		TermLabel newSecondTerminalNode = newOtherTerminals.isEmpty() ? null : newOtherTerminals.removeFirst();
		return new Fringe(newRootLabel, newFirstTerminalNode, newSecondTerminalNode, newOtherTerminals);
	}
}
/* * Copyright 2008 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.base.Preconditions; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.google.javascript.jscomp.CodingConvention.SubclassRelationship; import com.google.javascript.jscomp.DefinitionsRemover.Definition; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * Garbage collection for variable and function definitions. Basically performs * a mark-and-sweep type algorithm over the JavaScript parse tree. * * For each scope: * (1) Scan the variable/function declarations at that scope. * (2) Traverse the scope for references, marking all referenced variables. * Unlike other compiler passes, this is a pre-order traversal, not a * post-order traversal. * (3) If the traversal encounters an assign without other side-effects, * create a continuation. Continue the continuation iff the assigned * variable is referenced. * (4) When the traversal completes, remove all unreferenced variables. * * If it makes it easier, you can think of the continuations of the traversal * as a reference graph. 
Each continuation represents a set of edges, where the * source node is a known variable, and the destination nodes are lazily * evaluated when the continuation is executed. * * This algorithm is similar to the algorithm used by {@code SmartNameRemoval}. * {@code SmartNameRemoval} maintains an explicit graph of dependencies * between global symbols. However, {@code SmartNameRemoval} cannot handle * non-trivial edges in the reference graph ("A is referenced iff both B and C * are referenced"), or local variables. {@code SmartNameRemoval} is also * substantially more complicated because it tries to handle namespaces * (which is largely unnecessary in the presence of {@code CollapseProperties}. * * This pass also uses a more complex analysis of assignments, where * an assignment to a variable or a property of that variable does not * necessarily count as a reference to that variable, unless we can prove * that it modifies external state. This is similar to * {@code FlowSensitiveInlineVariables}, except that it works for variables * used across scopes. * * @author nicksantos@google.com (Nick Santos) */ class RemoveUnusedVars implements CompilerPass, OptimizeCalls.CallGraphCompilerPass { private final AbstractCompiler compiler; private final CodingConvention codingConvention; private final boolean removeGlobals; private boolean preserveFunctionExpressionNames; /** * Keep track of variables that we've referenced. */ private final Set<Var> referenced = new HashSet<>(); /** * Keep track of variables that might be unreferenced. */ private final List<Var> maybeUnreferenced = new ArrayList<>(); /** * Keep track of scopes that we've traversed. */ private final List<Scope> allFunctionScopes = new ArrayList<>(); /** * Keep track of assigns to variables that we haven't referenced. */ private final Multimap<Var, Assign> assignsByVar = ArrayListMultimap.create(); /** * The assigns, indexed by the NAME node that they assign to. 
*/ private final Map<Node, Assign> assignsByNode = new HashMap<>(); /** * Subclass name -> class-defining call EXPR node. (like inherits) */ private final Multimap<Var, Node> classDefiningCalls = ArrayListMultimap.create(); /** * Keep track of continuations that are finished iff the variable they're * indexed by is referenced. */ private final Multimap<Var, Continuation> continuations = ArrayListMultimap.create(); private boolean modifyCallSites; private CallSiteOptimizer callSiteOptimizer; RemoveUnusedVars( AbstractCompiler compiler, boolean removeGlobals, boolean preserveFunctionExpressionNames, boolean modifyCallSites) { this.compiler = compiler; this.codingConvention = compiler.getCodingConvention(); this.removeGlobals = removeGlobals; this.preserveFunctionExpressionNames = preserveFunctionExpressionNames; this.modifyCallSites = modifyCallSites; } /** * Traverses the root, removing all unused variables. Multiple traversals * may occur to ensure all unused variables are removed. */ @Override public void process(Node externs, Node root) { Preconditions.checkState(compiler.getLifeCycleStage().isNormalized()); boolean shouldResetModifyCallSites = false; if (this.modifyCallSites) { // When RemoveUnusedVars is run after OptimizeCalls, this.modifyCallSites // is true. But if OptimizeCalls stops making changes, PhaseOptimizer // stops running it, so we come to RemoveUnusedVars and the defFinder is // null. In this case, we temporarily set this.modifyCallSites to false // for this run, and then reset it back to true at the end, for // subsequent runs. if (compiler.getDefinitionFinder() == null) { this.modifyCallSites = false; shouldResetModifyCallSites = true; } } process(externs, root, compiler.getDefinitionFinder()); // When doing OptimizeCalls, RemoveUnusedVars is the last pass in the // sequence, so the def finder must not be used by any subsequent passes. 
compiler.setDefinitionFinder(null); if (shouldResetModifyCallSites) { this.modifyCallSites = true; } } @Override public void process( Node externs, Node root, DefinitionUseSiteFinder defFinder) { if (modifyCallSites) { Preconditions.checkNotNull(defFinder); callSiteOptimizer = new CallSiteOptimizer(compiler, defFinder); } traverseAndRemoveUnusedReferences(root); if (callSiteOptimizer != null) { callSiteOptimizer.applyChanges(); } } /** * Traverses a node recursively. Call this once per pass. */ private void traverseAndRemoveUnusedReferences(Node root) { Scope scope = SyntacticScopeCreator.makeUntyped(compiler).createScope(root, null); traverseNode(root, null, scope); if (removeGlobals) { collectMaybeUnreferencedVars(scope); } interpretAssigns(); removeUnreferencedVars(); for (Scope fnScope : allFunctionScopes) { removeUnreferencedFunctionArgs(fnScope); } } /** * Traverses everything in the current scope and marks variables that * are referenced. * * During traversal, we identify subtrees that will only be * referenced if their enclosing variables are referenced. Instead of * traversing those subtrees, we create a continuation for them, * and traverse them lazily. */ private void traverseNode(Node n, Node parent, Scope scope) { Token type = n.getToken(); Var var = null; switch (type) { case FUNCTION: // If this function is a removable var, then create a continuation // for it instead of traversing immediately. if (NodeUtil.isFunctionDeclaration(n)) { var = scope.getVar(n.getFirstChild().getString()); } if (var != null && isRemovableVar(var)) { continuations.put(var, new Continuation(n, scope)); } else { traverseFunction(n, scope); } return; case ASSIGN: Assign maybeAssign = Assign.maybeCreateAssign(n); if (maybeAssign != null) { // Put this in the assign map. It might count as a reference, // but we won't know that until we have an index of all assigns. 
          var = scope.getVar(maybeAssign.nameNode.getString());
          if (var != null) {
            assignsByVar.put(var, maybeAssign);
            assignsByNode.put(maybeAssign.nameNode, maybeAssign);

            if (isRemovableVar(var) &&
                !maybeAssign.mayHaveSecondarySideEffects) {
              // If the var is unreferenced and performing this assign has
              // no secondary side effects, then we can create a continuation
              // for it instead of traversing immediately.
              continuations.put(var, new Continuation(n, scope));
              return;
            }
          }
        }
        break;

      case CALL:
        Var modifiedVar = null;

        // Look for calls to inheritance-defining calls (such as goog.inherits).
        SubclassRelationship subclassRelationship =
            codingConvention.getClassesDefinedByCall(n);
        if (subclassRelationship != null) {
          modifiedVar = scope.getVar(subclassRelationship.subclassName);
        } else {
          // Look for calls to addSingletonGetter calls.
          String className = codingConvention.getSingletonGetterClassName(n);
          if (className != null) {
            modifiedVar = scope.getVar(className);
          }
        }

        // Don't try to track the inheritance calls for non-globals. It would
        // be more correct to only not track when the subclass does not
        // reference a constructor, but checking that it is a global is
        // easier and mostly the same.
        if (modifiedVar != null && modifiedVar.isGlobal()
            && !referenced.contains(modifiedVar)) {
          // Save a reference to the EXPR node.
          classDefiningCalls.put(modifiedVar, parent);
          continuations.put(modifiedVar, new Continuation(n, scope));
          return;
        }
        break;

      case NAME:
        var = scope.getVar(n.getString());
        if (parent.isVar()) {
          // NAME under VAR: the child (if any) is the initializer expression.
          Node value = n.getFirstChild();
          if (value != null && var != null && isRemovableVar(var)
              && !NodeUtil.mayHaveSideEffects(value, compiler)) {
            // If the var is unreferenced and creating its value has no side
            // effects, then we can create a continuation for it instead
            // of traversing immediately.
            continuations.put(var, new Continuation(n, scope));
            return;
          }
        } else {
          // If arguments is escaped, we just assume the worst and continue
          // on all the parameters.
          if ("arguments".equals(n.getString()) && scope.isLocal()) {
            // Second child of a FUNCTION node is its parameter list.
            Node lp = scope.getRootNode().getSecondChild();
            for (Node a = lp.getFirstChild(); a != null; a = a.getNext()) {
              markReferencedVar(scope.getVar(a.getString()));
            }
          }

          // All name references that aren't declarations or assigns
          // are references to other vars.
          if (var != null) {
            // If that var hasn't already been marked referenced, then
            // start tracking it. If this is an assign, do nothing
            // for now.
            if (isRemovableVar(var)) {
              if (!assignsByNode.containsKey(n)) {
                markReferencedVar(var);
              }
            } else {
              markReferencedVar(var);
            }
          }
        }
        break;

      default:
        break;
    }

    // Recurse into children for all node types not fully handled above.
    for (Node c = n.getFirstChild(); c != null; c = c.getNext()) {
      traverseNode(c, n, scope);
    }
  }

  /** Whether this var is a candidate for removal (not pinned by any rule). */
  private boolean isRemovableVar(Var var) {
    // Global variables are off-limits if the user might be using them.
    if (!removeGlobals && var.isGlobal()) {
      return false;
    }
    // Referenced variables are off-limits.
    if (referenced.contains(var)) {
      return false;
    }
    // Exported variables are off-limits.
    return !codingConvention.isExported(var.getName());
  }

  /**
   * Traverses a function, which creates a new scope in JavaScript.
   *
   * Note that CATCH blocks also create a new scope, but only for the
   * catch variable. Declarations within the block actually belong to the
   * enclosing scope. Because we don't remove catch variables, there's
   * no need to treat CATCH blocks differently like we do functions.
   */
  private void traverseFunction(Node n, Scope parentScope) {
    Preconditions.checkState(n.getChildCount() == 3, n);
    Preconditions.checkState(n.isFunction(), n);

    final Node body = n.getLastChild();
    Preconditions.checkState(body.getNext() == null && body.isBlock(), body);

    Scope fnScope =
        SyntacticScopeCreator.makeUntyped(compiler).createScope(n, parentScope);
    traverseNode(body, n, fnScope);

    collectMaybeUnreferencedVars(fnScope);
    allFunctionScopes.add(fnScope);
  }

  /**
   * For each variable in this scope that we haven't found a reference
   * for yet, add it to the list of variables to check later.
   */
  private void collectMaybeUnreferencedVars(Scope scope) {
    for (Var var : scope.getVarIterable()) {
      if (isRemovableVar(var)) {
        maybeUnreferenced.add(var);
      }
    }
  }

  /**
   * Removes unreferenced arguments from a function declaration and when
   * possible the function's callSites.
   *
   * @param fnScope The scope inside the function
   */
  private void removeUnreferencedFunctionArgs(Scope fnScope) {
    // Notice that removing unreferenced function args breaks
    // Function.prototype.length. In advanced mode, we don't really care
    // about this: we consider "length" the equivalent of reflecting on
    // the function's lexical source.
    //
    // Rather than create a new option for this, we assume that if the user
    // is removing globals, then it's OK to remove unused function args.
    //
    // See http://code.google.com/p/closure-compiler/issues/detail?id=253
    if (!removeGlobals) {
      return;
    }

    Node function = fnScope.getRootNode();
    Preconditions.checkState(function.isFunction());
    if (NodeUtil.isGetOrSetKey(function.getParent())) {
      // The parameters object literal setters can not be removed.
      return;
    }

    Node argList = getFunctionArgList(function);
    boolean modifyCallers = modifyCallSites
        && callSiteOptimizer.canModifyCallers(function);
    if (!modifyCallers) {
      // Strip unreferenced args off the end of the function declaration.
      // Only trailing args can go in this mode, since removing an arg in the
      // middle would shift the positions callers bind to.
      Node lastArg;
      while ((lastArg = argList.getLastChild()) != null) {
        Var var = fnScope.getVar(lastArg.getString());
        if (!referenced.contains(var)) {
          compiler.reportChangeToEnclosingScope(lastArg);
          argList.removeChild(lastArg);
        } else {
          break;
        }
      }
    } else {
      callSiteOptimizer.optimize(fnScope, referenced);
    }
  }

  /**
   * @return the LP node containing the function parameters.
   */
  private static Node getFunctionArgList(Node function) {
    return function.getSecondChild();
  }

  /**
   * Rewrites call sites so that unused parameters can be removed even when
   * they are not the last parameter. Changes are accumulated in toRemove /
   * toReplaceWithZero and applied in one batch via applyChanges().
   */
  private static class CallSiteOptimizer {
    private final AbstractCompiler compiler;
    private final DefinitionUseSiteFinder defFinder;
    // Nodes (params or call args) scheduled for removal.
    private final List<Node> toRemove = new ArrayList<>();
    // Call args that cannot be removed but whose value is unused; replaced by 0.
    private final List<Node> toReplaceWithZero = new ArrayList<>();

    CallSiteOptimizer(
        AbstractCompiler compiler,
        DefinitionUseSiteFinder defFinder) {
      this.compiler = compiler;
      this.defFinder = defFinder;
    }

    /** Marks removable parameters of the function and its call sites. */
    public void optimize(Scope fnScope, Set<Var> referenced) {
      Node function = fnScope.getRootNode();
      Preconditions.checkState(function.isFunction());
      Node argList = getFunctionArgList(function);

      // In this path we try to modify all the call sites to remove unused
      // function parameters.
      boolean changeCallSignature = canChangeSignature(function);
      markUnreferencedFunctionArgs(
          fnScope, function, referenced,
          argList.getFirstChild(), 0, changeCallSignature);
    }

    /**
     * Applies optimizations to all previously marked nodes.
     */
    public void applyChanges() {
      for (Node n : toRemove) {
        compiler.reportChangeToEnclosingScope(n);
        n.getParent().removeChild(n);
      }
      for (Node n : toReplaceWithZero) {
        compiler.reportChangeToEnclosingScope(n);
        n.getParent().replaceChild(n, IR.number(0).srcref(n));
      }
    }

    /**
     * For each unused function parameter, determine if it can be removed
     * from all the call sites, if so, remove it from the function signature
     * and the call sites otherwise replace the unused value where possible
     * with a constant (0).
     *
     * @param scope The function scope
     * @param function The function
     * @param param The current parameter node in the parameter list.
     * @param paramIndex The index of the current parameter
     * @param canChangeSignature Whether the function signature can be changed.
     * @return Whether there is a following function parameter.
     */
    private boolean markUnreferencedFunctionArgs(
        Scope scope, Node function, Set<Var> referenced,
        Node param, int paramIndex,
        boolean canChangeSignature) {
      if (param != null) {
        // Take care of the following siblings first.
        // (Processing right-to-left lets each param know whether anything
        // follows it after removals.)
        boolean hasFollowing = markUnreferencedFunctionArgs(
            scope, function, referenced,
            param.getNext(), paramIndex + 1, canChangeSignature);

        Var var = scope.getVar(param.getString());
        if (!referenced.contains(var)) {
          Preconditions.checkNotNull(var);

          // Remove call parameter if we can generally change the signature
          // or if it is the last parameter in the parameter list.
          boolean modifyAllCallSites = canChangeSignature || !hasFollowing;
          if (modifyAllCallSites) {
            modifyAllCallSites = canRemoveArgFromCallSites(
                function, paramIndex);
          }

          tryRemoveArgFromCallSites(function, paramIndex, modifyAllCallSites);

          // Remove an unused function parameter if all the call sites can
          // be modified to remove it, or if it is the last parameter.
          if (modifyAllCallSites || !hasFollowing) {
            toRemove.add(param);
            return hasFollowing;
          }
        }
        return true;
      } else {
        // Anything past the last formal parameter can be removed from the call
        // sites.
        tryRemoveAllFollowingArgs(function, paramIndex - 1);
        return false;
      }
    }

    /**
     * Remove all references to a parameter, otherwise simplify the known
     * references.
     * @return Whether all the references were removed.
     */
    private boolean canRemoveArgFromCallSites(Node function, int argIndex) {
      Definition definition = getFunctionDefinition(function);

      // Check all the call sites.
      for (UseSite site : defFinder.getUseSites(definition)) {
        if (isModifiableCallSite(site)) {
          Node arg = getArgumentForCallOrNewOrDotCall(site, argIndex);
          // TODO(johnlenz): try to remove parameters with side-effects by
          // decomposing the call expression.
          if (arg != null && NodeUtil.mayHaveSideEffects(arg, compiler)) {
            return false;
          }
        } else {
          return false;
        }
      }

      return true;
    }

    /**
     * Remove all references to a parameter if possible otherwise simplify the
     * side-effect free parameters.
     */
    private void tryRemoveArgFromCallSites(
        Node function, int argIndex, boolean canModifyAllSites) {
      Definition definition = getFunctionDefinition(function);

      for (UseSite site : defFinder.getUseSites(definition)) {
        if (isModifiableCallSite(site)) {
          Node arg = getArgumentForCallOrNewOrDotCall(site, argIndex);
          if (arg != null) {
            // Even if we can't change the signature in general we can always
            // remove an unused value off the end of the parameter list.
            if (canModifyAllSites
                || (arg.getNext() == null
                    && !NodeUtil.mayHaveSideEffects(arg, compiler))) {
              toRemove.add(arg);
            } else {
              // Replace the arg with 0, unless it already is the literal 0
              // (avoids a pointless no-op replacement).
              if (!NodeUtil.mayHaveSideEffects(arg, compiler)
                  && (!arg.isNumber() || arg.getDouble() != 0)) {
                toReplaceWithZero.add(arg);
              }
            }
          }
        }
      }
    }

    /**
     * Remove all the following parameters without side-effects
     */
    private void tryRemoveAllFollowingArgs(Node function, final int argIndex) {
      Definition definition = getFunctionDefinition(function);
      for (UseSite site : defFinder.getUseSites(definition)) {
        if (!isModifiableCallSite(site)) {
          continue;
        }
        Node arg = getArgumentForCallOrNewOrDotCall(site, argIndex + 1);
        while (arg != null) {
          if (!NodeUtil.mayHaveSideEffects(arg)) {
            toRemove.add(arg);
          }
          arg = arg.getNext();
        }
      }
    }

    /**
     * Returns the nth argument node given a usage site for a direct function
     * call or for a func.call() node.
     */
    private static Node getArgumentForCallOrNewOrDotCall(UseSite site,
        final int argIndex) {
      int adjustedArgIndex = argIndex;
      Node parent = site.node.getParent();
      // For f.call(thisArg, a, b, ...) the first argument is the this-value,
      // so formal index i maps to call argument i + 1.
      if (NodeUtil.isFunctionObjectCall(parent)) {
        adjustedArgIndex++;
      }
      return NodeUtil.getArgumentForCallOrNew(parent, adjustedArgIndex);
    }

    /**
     * @param function
     * @return Whether the callers to this function can be modified in any way.
     */
    boolean canModifyCallers(Node function) {
      if (NodeUtil.isVarArgsFunction(function)) {
        return false;
      }

      DefinitionSite defSite = defFinder.getDefinitionForFunction(function);
      if (defSite == null) {
        return false;
      }

      Definition definition = defSite.definition;

      // Be conservative, don't try to optimize any declaration that isn't as
      // simple function declaration or assignment.
      if (!NodeUtil.isSimpleFunctionDeclaration(function)) {
        return false;
      }

      return defFinder.canModifyDefinition(definition);
    }

    /**
     * @param site The site to inspect
     * @return Whether the call site is suitable for modification
     */
    private static boolean isModifiableCallSite(UseSite site) {
      return DefinitionUseSiteFinder.isCallOrNewSite(site)
          && !NodeUtil.isFunctionObjectApply(site.node.getParent());
    }

    /**
     * @return Whether the definitionSite represents a function whose call
     * signature can be modified.
     */
    private boolean canChangeSignature(Node function) {
      Definition definition = getFunctionDefinition(function);
      CodingConvention convention = compiler.getCodingConvention();

      Preconditions.checkState(!definition.isExtern());

      Collection<UseSite> useSites = defFinder.getUseSites(definition);
      for (UseSite site : useSites) {
        Node parent = site.node.getParent();

        // This was a use site removed by something else before we run.
        // 1. By another pass before us which means the definition graph is
        //    not updated properly.
        // 2. By the continuations algorithm above.
        if (parent == null) {
          continue; // Ignore it.
        }

        // Ignore references within goog.inherits calls.
        if (parent.isCall()
            && convention.getClassesDefinedByCall(parent) != null) {
          continue;
        }

        // Accessing the property directly prevents rewrite.
        if (!DefinitionUseSiteFinder.isCallOrNewSite(site)) {
          if (!(parent.isGetProp()
              && NodeUtil.isFunctionObjectCall(parent.getParent()))) {
            return false;
          }
        }

        if (NodeUtil.isFunctionObjectApply(parent)) {
          return false;
        }

        // TODO(johnlenz): support specialization

        // Multiple definitions prevent rewrite.
// Attempt to validate the state of the simple definition finder. Node nameNode = site.node; Collection<Definition> singleSiteDefinitions = defFinder.getDefinitionsReferencedAt(nameNode); Preconditions.checkState(singleSiteDefinitions.size() == 1); Preconditions.checkState(singleSiteDefinitions.contains(definition)); } return true; } /** * @param function * @return the Definition object for the function. */ private Definition getFunctionDefinition(Node function) { DefinitionSite definitionSite = defFinder.getDefinitionForFunction( function); Preconditions.checkNotNull(definitionSite); Definition definition = definitionSite.definition; Preconditions.checkState(!definitionSite.inExterns); Preconditions.checkState(definition.getRValue() == function); return definition; } } /** * Look at all the property assigns to all variables. * These may or may not count as references. For example, * * <code> * var x = {}; * x.foo = 3; // not a reference. * var y = foo(); * y.foo = 3; // is a reference. * </code> * * Interpreting assignments could mark a variable as referenced that * wasn't referenced before, in order to keep it alive. Because we find * references by lazily traversing subtrees, marking a variable as * referenced could trigger new traversals of new subtrees, which could * find new references. * * Therefore, this interpretation needs to be run to a fixed point. */ private void interpretAssigns() { boolean changes = false; do { changes = false; // We can't use traditional iterators and iterables for this list, // because our lazily-evaluated continuations will modify it while // we traverse it. 
      for (int current = 0; current < maybeUnreferenced.size(); current++) {
        Var var = maybeUnreferenced.get(current);
        if (referenced.contains(var)) {
          maybeUnreferenced.remove(current);
          current--;
        } else {
          boolean assignedToUnknownValue = false;
          boolean hasPropertyAssign = false;

          if (var.getParentNode().isVar()
              && !NodeUtil.isForIn(var.getParentNode().getParent())) {
            Node value = var.getInitialValue();
            assignedToUnknownValue = value != null
                && !NodeUtil.isLiteralValue(value, true);
          } else {
            // This was initialized to a function arg or a catch param
            // or a for...in variable.
            assignedToUnknownValue = true;
          }

          boolean maybeEscaped = false;
          for (Assign assign : assignsByVar.get(var)) {
            if (assign.isPropertyAssign) {
              hasPropertyAssign = true;
            } else if (!NodeUtil.isLiteralValue(
                assign.assignNode.getLastChild(), true)) {
              assignedToUnknownValue = true;
            }
            if (assign.maybeAliased) {
              maybeEscaped = true;
            }
          }

          // A property write on a value we don't fully know (or that may have
          // escaped) counts as a real reference: keep the var alive.
          if ((assignedToUnknownValue || maybeEscaped) && hasPropertyAssign) {
            changes = markReferencedVar(var) || changes;
            maybeUnreferenced.remove(current);
            current--;
          }
        }
      }
    } while (changes);
  }

  /**
   * Remove all assigns to a var.
   */
  private void removeAllAssigns(Var var) {
    for (Assign assign : assignsByVar.get(var)) {
      compiler.reportChangeToEnclosingScope(assign.assignNode);
      assign.remove();
    }
  }

  /**
   * Marks a var as referenced, recursing into any values of this var
   * that we skipped.
   * @return True if this variable had not been referenced before.
   */
  private boolean markReferencedVar(Var var) {
    if (referenced.add(var)) {
      for (Continuation c : continuations.get(var)) {
        c.apply();
      }
      return true;
    }
    return false;
  }

  /**
   * Removes any vars in the scope that were not referenced. Removes any
   * assignments to those variables as well.
   */
  private void removeUnreferencedVars() {
    for (Var var : maybeUnreferenced) {
      // Remove calls to inheritance-defining functions where the unreferenced
      // class is the subclass.
      for (Node exprCallNode : classDefiningCalls.get(var)) {
        compiler.reportChangeToEnclosingScope(exprCallNode);
        NodeUtil.removeChild(exprCallNode.getParent(), exprCallNode);
      }

      // Regardless of what happens to the original declaration,
      // we need to remove all assigns, because they may contain references
      // to other unreferenced variables.
      removeAllAssigns(var);

      compiler.addToDebugLog("Unreferenced var: " + var.name);
      Node nameNode = var.nameNode;
      Node toRemove = nameNode.getParent();
      Node parent = toRemove.getParent();

      Preconditions.checkState(
          toRemove.isVar()
              || toRemove.isFunction()
              || toRemove.isParamList() && parent.isFunction(),
          "We should only declare vars and functions and function args");

      if (toRemove.isParamList()
          && parent.isFunction()) {
        // Don't remove function arguments here. That's a special case
        // that's taken care of in removeUnreferencedFunctionArgs.
      } else if (NodeUtil.isFunctionExpression(toRemove)) {
        if (!preserveFunctionExpressionNames) {
          compiler.reportChangeToEnclosingScope(toRemove);
          toRemove.getFirstChild().setString("");
        }
        // Don't remove bleeding functions.
      } else if (parent != null && parent.isFor()
          && parent.getChildCount() < 4) {
        // foreach iterations have 3 children. Leave them alone.
      } else if (toRemove.isVar()
          && nameNode.hasChildren()
          && NodeUtil.mayHaveSideEffects(nameNode.getFirstChild(), compiler)) {
        // If this is a single var declaration, we can at least remove the
        // declaration itself and just leave the value, e.g.,
        // var a = foo(); => foo();
        if (toRemove.getChildCount() == 1) {
          compiler.reportChangeToEnclosingScope(toRemove);
          parent.replaceChild(toRemove,
              IR.exprResult(nameNode.removeFirstChild()));
        }
      } else if (toRemove.isVar()
          && toRemove.getChildCount() > 1) {
        // For var declarations with multiple names (i.e. var a, b, c),
        // only remove the unreferenced name
        compiler.reportChangeToEnclosingScope(toRemove);
        toRemove.removeChild(nameNode);
      } else if (parent != null) {
        compiler.reportChangeToEnclosingScope(toRemove);
        NodeUtil.removeChild(parent, toRemove);
      }
    }
  }

  /**
   * Our progress in a traversal can be expressed completely as the
   * current node and scope. The continuation lets us save that
   * information so that we can continue the traversal later.
   */
  private class Continuation {
    private final Node node;
    private final Scope scope;

    Continuation(Node node, Scope scope) {
      this.node = node;
      this.scope = scope;
    }

    /** Resumes the traversal that was deferred when this was created. */
    void apply() {
      if (NodeUtil.isFunctionDeclaration(node)) {
        traverseFunction(node, scope);
      } else {
        for (Node child = node.getFirstChild();
             child != null; child = child.getNext()) {
          traverseNode(child, node, scope);
        }
      }
    }
  }

  /** Record for a single assignment to a tracked variable (or its property). */
  private static class Assign {
    final Node assignNode;
    final Node nameNode;

    // If false, then this is an assign to the normal variable. Otherwise,
    // this is an assign to a property of that variable.
    final boolean isPropertyAssign;

    // Secondary side effects are any side effects in this assign statement
    // that aren't caused by the assignment operation itself. For example,
    // a().b = 3;
    // a = b();
    // var foo = (a = b);
    // In the first two cases, the sides of the assignment have side-effects.
    // In the last one, the result of the assignment is used, so we
    // are conservative and assume that it may be used in a side-effecting
    // way.
    final boolean mayHaveSecondarySideEffects;

    // If true, the value may have escaped and any modification is a use.
    final boolean maybeAliased;

    Assign(Node assignNode, Node nameNode, boolean isPropertyAssign) {
      Preconditions.checkState(NodeUtil.isAssignmentOp(assignNode));
      this.assignNode = assignNode;
      this.nameNode = nameNode;
      this.isPropertyAssign = isPropertyAssign;

      this.maybeAliased = NodeUtil.isExpressionResultUsed(assignNode);
      this.mayHaveSecondarySideEffects =
          maybeAliased
              || NodeUtil.mayHaveSideEffects(assignNode.getFirstChild())
              || NodeUtil.mayHaveSideEffects(assignNode.getLastChild());
    }

    /**
     * If this is an assign to a variable or its property, return it.
     * Otherwise, return null.
     */
    static Assign maybeCreateAssign(Node assignNode) {
      Preconditions.checkState(NodeUtil.isAssignmentOp(assignNode));

      // Skip one level of GETPROPs or GETELEMs.
      //
      // Don't skip more than one level, because then we get into
      // situations where assigns to properties of properties will always
      // trigger side-effects, and the variable they're on cannot be removed.
      boolean isPropAssign = false;
      Node current = assignNode.getFirstChild();
      if (NodeUtil.isGet(current)) {
        current = current.getFirstChild();
        isPropAssign = true;

        if (current.isGetProp()
            && current.getLastChild().getString().equals("prototype")) {
          // Prototype properties sets should be considered like normal
          // property sets.
          current = current.getFirstChild();
        }
      }

      if (current.isName()) {
        return new Assign(assignNode, current, isPropAssign);
      }
      return null;
    }

    /**
     * Replace the current assign with its right hand side.
     */
    void remove() {
      Node parent = assignNode.getParent();

      if (mayHaveSecondarySideEffects) {
        Node replacement = assignNode.getLastChild().detach();

        // Aggregate any expressions in GETELEMs.
        // Walk down the LHS chain; GETELEM index expressions may themselves
        // have side effects, so preserve them in a COMMA expression.
        for (Node current = assignNode.getFirstChild();
             !current.isName();
             current = current.getFirstChild()) {
          if (current.isGetElem()) {
            replacement = IR.comma(
                current.getLastChild().detach(), replacement);
            replacement.useSourceInfoIfMissingFrom(current);
          }
        }

        parent.replaceChild(assignNode, replacement);
      } else {
        Node grandparent = parent.getParent();
        if (parent.isExprResult()) {
          // Whole statement was just this assign: drop the statement.
          grandparent.removeChild(parent);
        } else {
          parent.replaceChild(assignNode,
              assignNode.getLastChild().detach());
        }
      }
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.reef.io.network.impl; import org.apache.reef.exception.evaluator.NetworkException; import org.apache.reef.io.Tuple; import org.apache.reef.io.network.ConnectionFactory; import org.apache.reef.io.network.Message; import org.apache.reef.io.network.NetworkConnectionService; import org.apache.reef.io.network.exception.NetworkRuntimeException; import org.apache.reef.io.network.impl.config.NetworkConnectionServiceIdFactory; import org.apache.reef.io.network.impl.config.NetworkConnectionServicePort; import org.apache.reef.io.network.naming.NameResolver; import org.apache.reef.tang.annotations.Parameter; import org.apache.reef.wake.EStage; import org.apache.reef.wake.EventHandler; import org.apache.reef.wake.Identifier; import org.apache.reef.wake.IdentifierFactory; import org.apache.reef.wake.impl.SingleThreadStage; import org.apache.reef.wake.remote.Codec; import org.apache.reef.wake.remote.impl.TransportEvent; import org.apache.reef.wake.remote.transport.Link; import org.apache.reef.wake.remote.transport.LinkListener; import org.apache.reef.wake.remote.transport.Transport; import org.apache.reef.wake.remote.transport.TransportFactory; import javax.inject.Inject; import 
java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Default Network connection service implementation.
 */
public final class NetworkConnectionServiceImpl implements NetworkConnectionService {

  private static final Logger LOG = Logger.getLogger(NetworkConnectionServiceImpl.class.getName());

  /**
   * An identifier factory registering network connection service id.
   */
  private final IdentifierFactory idFactory;

  /**
   * A name resolver looking up nameserver.
   */
  private final NameResolver nameResolver;

  /**
   * A messaging transport.
   */
  private final Transport transport;

  /**
   * A map of (id of connection factory, a connection factory instance).
   */
  private final ConcurrentMap<String, NetworkConnectionFactory> connFactoryMap;

  // TODO[JIRA REEF-637] Remove the deprecated field.
  /**
   * A network connection service identifier.
   * @deprecated in 0.13. Use ConnectionFactory.getLocalEndPointId instead.
   */
  @Deprecated
  private Identifier myId;

  /**
   * A network connection service message codec.
   */
  private final Codec<NetworkConnectionServiceMessage> nsCodec;

  /**
   * A network connection service link listener.
   */
  private final LinkListener<NetworkConnectionServiceMessage> nsLinkListener;

  /**
   * A stage registering identifiers to nameServer.
   */
  private final EStage<Tuple<Identifier, InetSocketAddress>> nameServiceRegisteringStage;

  /**
   * A stage unregistering identifiers from nameServer.
   */
  private final EStage<Identifier> nameServiceUnregisteringStage;

  /**
   * A boolean flag that indicates whether the NetworkConnectionService is closed.
   */
  private final AtomicBoolean isClosed;

  /**
   * A DELIMITER to make a concatenated end point id {{connectionFactoryId}}{{DELIMITER}}{{localEndPointId}}.
*/ private static final String DELIMITER = "/"; @Inject private NetworkConnectionServiceImpl( @Parameter(NetworkConnectionServiceIdFactory.class) final IdentifierFactory idFactory, @Parameter(NetworkConnectionServicePort.class) final int nsPort, final TransportFactory transportFactory, final NameResolver nameResolver) { this.idFactory = idFactory; this.connFactoryMap = new ConcurrentHashMap<>(); this.nsCodec = new NetworkConnectionServiceMessageCodec(idFactory, connFactoryMap); this.nsLinkListener = new NetworkConnectionServiceLinkListener(connFactoryMap); final EventHandler<TransportEvent> recvHandler = new NetworkConnectionServiceReceiveHandler(connFactoryMap, nsCodec); this.nameResolver = nameResolver; this.transport = transportFactory.newInstance(nsPort, recvHandler, recvHandler, new NetworkConnectionServiceExceptionHandler()); this.nameServiceRegisteringStage = new SingleThreadStage<>( "NameServiceRegisterer", new EventHandler<Tuple<Identifier, InetSocketAddress>>() { @Override public void onNext(final Tuple<Identifier, InetSocketAddress> tuple) { try { nameResolver.register(tuple.getKey(), tuple.getValue()); LOG.log(Level.FINEST, "Registered {0} with nameservice", tuple.getKey()); } catch (final Exception ex) { final String msg = "Unable to register " + tuple.getKey() + " with name service"; LOG.log(Level.WARNING, msg, ex); throw new RuntimeException(msg, ex); } } }, 5); this.nameServiceUnregisteringStage = new SingleThreadStage<>( "NameServiceRegisterer", new EventHandler<Identifier>() { @Override public void onNext(final Identifier id) { try { nameResolver.unregister(id); LOG.log(Level.FINEST, "Unregistered {0} with nameservice", id); } catch (final Exception ex) { final String msg = "Unable to unregister " + id + " with name service"; LOG.log(Level.WARNING, msg, ex); throw new RuntimeException(msg, ex); } } }, 5); this.isClosed = new AtomicBoolean(); } // TODO[JIRA REEF-637] Remove the deprecated method. /** * @deprecated in 0.13. 
Use registerConnectionFactory(Identifier, Codec, EventHandler, LinkListener, Identifier)
   * instead.
   */
  @Deprecated
  @Override
  public <T> void registerConnectionFactory(final Identifier connFactoryId,
                                            final Codec<T> codec,
                                            final EventHandler<Message<T>> eventHandler,
                                            final LinkListener<Message<T>> linkListener) throws NetworkException {
    final String id = connFactoryId.toString();
    if (connFactoryMap.get(id) != null) {
      throw new NetworkException("ConnectionFactory " + connFactoryId + " was already registered.");
    }
    // putIfAbsent guards against a concurrent registration that slipped in
    // between the check above and here.
    final ConnectionFactory connFactory = connFactoryMap.putIfAbsent(id,
        new NetworkConnectionFactory<>(this, connFactoryId, codec, eventHandler, linkListener, null));
    if (connFactory != null) {
      throw new NetworkException("ConnectionFactory " + connFactoryId + " was already registered.");
    }
  }

  // Validates that a new ConnectionFactory may be registered under the given id:
  // service still open, id not taken, and id free of the DELIMITER character.
  private void checkBeforeRegistration(final String connectionFactoryId) {
    if (isClosed.get()) {
      throw new NetworkRuntimeException("Unable to register new ConnectionFactory to closed NetworkConnectionService");
    }

    if (connFactoryMap.get(connectionFactoryId) != null) {
      throw new NetworkRuntimeException("ConnectionFactory " + connectionFactoryId + " was already registered.");
    }

    if (connectionFactoryId.contains(DELIMITER)) {
      throw new NetworkRuntimeException(
          "The ConnectionFactoryId " + connectionFactoryId + " should not contain " + DELIMITER);
    }
  }

  @Override
  public <T> ConnectionFactory<T> registerConnectionFactory(
      final Identifier connectionFactoryId,
      final Codec<T> codec,
      final EventHandler<Message<T>> eventHandler,
      final LinkListener<Message<T>> linkListener,
      final Identifier localEndPointId) {

    final String id = connectionFactoryId.toString();
    checkBeforeRegistration(id);

    final NetworkConnectionFactory<T> connectionFactory = new NetworkConnectionFactory<>(
        this, connectionFactoryId, codec, eventHandler, linkListener, localEndPointId);
    // Register "{connectionFactoryId}/{localEndPointId}" with the name service asynchronously.
    final Identifier localId = getEndPointIdWithConnectionFactoryId(connectionFactoryId, localEndPointId);
    nameServiceRegisteringStage.onNext(new
Tuple<>(localId, (InetSocketAddress) transport.getLocalAddress()));

    if (connFactoryMap.putIfAbsent(id, connectionFactory) != null) {
      throw new NetworkRuntimeException("ConnectionFactory " + connectionFactoryId + " was already registered.");
    }
    LOG.log(Level.INFO, "ConnectionFactory {0} was registered", id);
    return connectionFactory;
  }

  @Override
  public void unregisterConnectionFactory(final Identifier connFactoryId) {
    final String id = connFactoryId.toString();
    final NetworkConnectionFactory connFactory = connFactoryMap.remove(id);
    if (connFactory != null) {
      LOG.log(Level.INFO, "ConnectionFactory {0} was unregistered", id);
      if (!connFactory.isRegisteredByDeprecatedMethod()) {
        // TODO[JIRA REEF-637] : Remove the redundant check.
        final Identifier localId = getEndPointIdWithConnectionFactoryId(
            connFactoryId, connFactory.getLocalEndPointId());
        nameServiceUnregisteringStage.onNext(localId);
      }
    } else {
      LOG.log(Level.WARNING, "ConnectionFactory of {0} is null", id);
    }
  }

  // TODO[JIRA REEF-637] Remove the deprecated method.
  /**
   * Registers a source identifier of NetworkConnectionService.
   * @param ncsId the service identifier to bind to this service's transport address
   * @throws Exception
   * @deprecated in 0.13. Use registerConnectionFactory(Identifier, Codec, EventHandler, LinkListener, Identifier)
   * instead.
   */
  @Deprecated
  @Override
  public void registerId(final Identifier ncsId) {
    LOG.log(Level.INFO, "Registering NetworkConnectionService " + ncsId);
    this.myId = ncsId;
    final Tuple<Identifier, InetSocketAddress> tuple =
        new Tuple<>(ncsId, (InetSocketAddress) this.transport.getLocalAddress());
    LOG.log(Level.FINEST, "Binding {0} to NetworkConnectionService@({1})",
        new Object[]{tuple.getKey(), tuple.getValue()});
    this.nameServiceRegisteringStage.onNext(tuple);
  }

  /**
   * Open a channel for destination identifier of NetworkConnectionService.
   * @param connectionFactoryId the factory namespace of the remote end point
   * @param remoteEndPointId the remote end point to connect to
   * @throws NetworkException if the lookup fails or the transport cannot open the link
   */
  <T> Link<NetworkConnectionServiceMessage<T>> openLink(
      final Identifier connectionFactoryId, final Identifier remoteEndPointId) throws NetworkException {
    // Resolve "{connectionFactoryId}/{remoteEndPointId}" to a socket address first.
    final Identifier remoteId = getEndPointIdWithConnectionFactoryId(connectionFactoryId, remoteEndPointId);
    try {
      final SocketAddress address = nameResolver.lookup(remoteId);
      if (address == null) {
        throw new NetworkException("Lookup " + remoteId + " is null");
      }
      return transport.open(address, nsCodec, nsLinkListener);
    } catch(final Exception e) {
      throw new NetworkException(e);
    }
  }

  // TODO[JIRA REEF-637] Remove the deprecated method.
  /**
   * Open a channel for destination identifier of NetworkConnectionService.
   * @param remoteEndPointId the remote end point to connect to
   * @throws NetworkException if the lookup fails or the transport cannot open the link
   * @deprecated in 0.13. Use openLink(Identifier, Identifier) instead.
   */
  @Deprecated
  <T> Link<NetworkConnectionServiceMessage<T>> openLink(final Identifier remoteEndPointId) throws NetworkException {
    try {
      final SocketAddress address = nameResolver.lookup(remoteEndPointId);
      if (address == null) {
        throw new NetworkException("Lookup " + remoteEndPointId + " is null");
      }
      return transport.open(address, nsCodec, nsLinkListener);
    } catch(final Exception e) {
      throw new NetworkException(e);
    }
  }

  // Builds "{connectionFactoryId}{DELIMITER}{endPointId}" and wraps it in an Identifier.
  private Identifier getEndPointIdWithConnectionFactoryId(
      final Identifier connectionFactoryId, final Identifier endPointId) {
    final String identifier = connectionFactoryId.toString() + DELIMITER + endPointId.toString();
    return idFactory.getNewInstance(identifier);
  }

  /**
   * Gets a ConnectionFactory.
* @param connFactoryId the identifier of the ConnectionFactory
 * @return the ConnectionFactory registered under the identifier
 */
  @Override
  public <T> ConnectionFactory<T> getConnectionFactory(final Identifier connFactoryId) {
    final ConnectionFactory<T> connFactory = connFactoryMap.get(connFactoryId.toString());
    if (connFactory == null) {
      throw new RuntimeException("Cannot find ConnectionFactory of " + connFactoryId + ".");
    }
    return connFactory;
  }

  // TODO[JIRA REEF-637] Remove the deprecated method.
  /**
   * Unbinds this service's identifier from the name service and clears it.
   *
   * @param ncsId network connection service identifier
   * @deprecated in 0.13.
   */
  @Deprecated
  @Override
  public void unregisterId(final Identifier ncsId) {
    LOG.log(Level.FINEST, "Unbinding {0} to NetworkConnectionService@({1})",
        new Object[]{ncsId, this.transport.getLocalAddress()});
    this.myId = null;
    this.nameServiceUnregisteringStage.onNext(ncsId);
  }

  // TODO[JIRA REEF-637] Remove the deprecated method.
  /**
   * @return the identifier of this NetworkConnectionService
   * @deprecated in 0.13.
   */
  @Deprecated
  @Override
  public Identifier getNetworkConnectionServiceId() {
    return this.myId;
  }

  /**
   * Shuts the service down: closes both name-service stages, the resolver and
   * the transport. The compareAndSet guard makes this idempotent — the
   * shutdown sequence runs at most once.
   */
  @Override
  public void close() throws Exception {
    if (isClosed.compareAndSet(false, true)) {
      LOG.log(Level.FINE, "Shutting down");
      this.nameServiceRegisteringStage.close();
      this.nameServiceUnregisteringStage.close();
      this.nameResolver.close();
      this.transport.close();
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec; import java.io.Serializable; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.ql.DriverContext; import org.apache.hadoop.hive.ql.QueryPlan; import org.apache.hadoop.hive.ql.io.StatsProvidingRecordReader; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec; import org.apache.hadoop.hive.ql.plan.StatsNoJobWork; import 
org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;

import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
import com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * StatsNoJobTask is used in cases where stats collection is the only task for the given query (no
 * parent MR or Tez job). It is used in the following cases 1) ANALYZE with partialscan/noscan for
 * file formats that implement StatsProvidingRecordReader interface: ORC format (implements
 * StatsProvidingRecordReader) stores column statistics for all columns in the file footer. It is
 * much faster to compute the table/partition statistics by reading the footer than scanning all
 * the rows. This task can be used for computing basic stats like numFiles, numRows, fileSize,
 * rawDataSize from ORC footer.
**/ public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable { private static final long serialVersionUID = 1L; private static transient final Log LOG = LogFactory.getLog(StatsNoJobTask.class); private static ConcurrentMap<String, Partition> partUpdates; private static Table table; private static String tableFullName; private static JobConf jc = null; public StatsNoJobTask() { super(); } @Override public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext) { super.initialize(conf, queryPlan, driverContext); jc = new JobConf(conf); } @Override public int execute(DriverContext driverContext) { LOG.info("Executing stats (no job) task"); String tableName = ""; ExecutorService threadPool = null; try { tableName = work.getTableSpecs().tableName; table = db.getTable(tableName); int numThreads = HiveConf.getIntVar(conf, ConfVars.HIVE_STATS_GATHER_NUM_THREADS); tableFullName = table.getDbName() + "." + table.getTableName(); threadPool = Executors.newFixedThreadPool(numThreads, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("StatsNoJobTask-Thread-%d") .build()); partUpdates = new MapMaker().concurrencyLevel(numThreads).makeMap(); LOG.info("Initialized threadpool for stats computation with " + numThreads + " threads"); } catch (HiveException e) { LOG.error("Cannot get table " + tableName, e); console.printError("Cannot get table " + tableName, e.toString()); } return aggregateStats(threadPool); } @Override public StageType getType() { return StageType.STATS; } @Override public String getName() { return "STATS-NO-JOB"; } class StatsCollection implements Runnable { private Partition partn; public StatsCollection(Partition part) { this.partn = part; } @Override public void run() { // get the list of partitions org.apache.hadoop.hive.metastore.api.Partition tPart = partn.getTPartition(); Map<String, String> parameters = tPart.getParameters(); try { Path dir = new Path(tPart.getSd().getLocation()); long numRows = 0; 
long rawDataSize = 0;
        long fileSize = 0;
        long numFiles = 0;
        FileSystem fs = dir.getFileSystem(conf);
        List<FileStatus> fileList = ShimLoader.getHadoopShims().listLocatedStatus(fs, dir, hiddenFileFilter);
        boolean statsAvailable = false;

        // Read basic stats out of each data file's footer instead of scanning rows;
        // only readers implementing StatsProvidingRecordReader (e.g. ORC) contribute.
        for(FileStatus file: fileList) {
          if (!file.isDir()) {
            InputFormat<?, ?> inputFormat = (InputFormat<?, ?>) ReflectionUtils.newInstance(
                partn.getInputFormatClass(), jc);
            InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0,
                new String[] { partn.getLocation() });
            org.apache.hadoop.mapred.RecordReader<?, ?> recordReader =
                (org.apache.hadoop.mapred.RecordReader<?, ?>)
                    inputFormat.getRecordReader(dummySplit, jc, Reporter.NULL);
            StatsProvidingRecordReader statsRR;
            if (recordReader instanceof StatsProvidingRecordReader) {
              statsRR = (StatsProvidingRecordReader) recordReader;
              rawDataSize += statsRR.getStats().getRawDataSize();
              numRows += statsRR.getStats().getRowCount();
              fileSize += file.getLen();
              numFiles += 1;
              statsAvailable = true;
            }
            recordReader.close();
          }
        }

        if (statsAvailable) {
          parameters.put(StatsSetupConst.ROW_COUNT, String.valueOf(numRows));
          parameters.put(StatsSetupConst.RAW_DATA_SIZE, String.valueOf(rawDataSize));
          parameters.put(StatsSetupConst.TOTAL_SIZE, String.valueOf(fileSize));
          parameters.put(StatsSetupConst.NUM_FILES, String.valueOf(numFiles));
          parameters.put(StatsSetupConst.STATS_GENERATED_VIA_STATS_TASK, StatsSetupConst.TRUE);

          partUpdates.put(tPart.getSd().getLocation(), new Partition(table, tPart));

          // printout console and debug logs
          String threadName = Thread.currentThread().getName();
          String msg = "Partition " + tableFullName + partn.getSpec() + " stats: ["
              + toString(parameters) + ']';
          LOG.debug(threadName + ": " + msg);
          console.printInfo(msg);
        } else {
          String threadName = Thread.currentThread().getName();
          String msg = "Partition " + tableFullName + partn.getSpec() + " does not provide stats.";
          LOG.debug(threadName + ": " + msg);
        }
      } catch (Exception e) {
        console.printInfo("[Warning] could not update stats for " +
            tableFullName + partn.getSpec() + ".", "Failed with exception "
            + e.getMessage() + "\n" + StringUtils.stringifyException(e));

        // Before updating the partition params, if any partition params is null
        // and if statsReliable is true then updatePartition() function will fail
        // the task by returning 1
        // NOTE(review): partUpdates comes from Guava's MapMaker and is a
        // ConcurrentMap, which rejects null values — this put likely throws
        // NullPointerException instead of recording the failure; verify.
        if (work.isStatsReliable()) {
          partUpdates.put(tPart.getSd().getLocation(), null);
        }
      }
    }

    // Renders the supported stat entries as "k=v, k=v" for logging.
    // NOTE(review): duplicates the identical helper on the outer class.
    private String toString(Map<String, String> parameters) {
      StringBuilder builder = new StringBuilder();
      for (String statType : StatsSetupConst.supportedStats) {
        String value = parameters.get(statType);
        if (value != null) {
          if (builder.length() > 0) {
            builder.append(", ");
          }
          builder.append(statType).append('=').append(value);
        }
      }
      return builder.toString();
    }
  }

  /**
   * Computes and persists footer-based stats. Non-partitioned tables are
   * processed inline and altered directly; partitioned tables fan out one
   * StatsCollection per partition to the thread pool, then bulk-update.
   *
   * @return 0 on success, non-zero on failure
   */
  private int aggregateStats(ExecutorService threadPool) {
    int ret = 0;
    try {
      List<Partition> partitions = getPartitionsList();

      // non-partitioned table
      if (partitions == null) {
        org.apache.hadoop.hive.metastore.api.Table tTable = table.getTTable();
        Map<String, String> parameters = tTable.getParameters();
        try {
          Path dir = new Path(tTable.getSd().getLocation());
          long numRows = 0;
          long rawDataSize = 0;
          long fileSize = 0;
          long numFiles = 0;
          FileSystem fs = dir.getFileSystem(conf);
          List<FileStatus> fileList = ShimLoader.getHadoopShims().listLocatedStatus(fs, dir, hiddenFileFilter);
          boolean statsAvailable = false;

          // Same footer-reading loop as StatsCollection.run(), but at table scope.
          for(FileStatus file: fileList) {
            if (!file.isDir()) {
              InputFormat<?, ?> inputFormat = (InputFormat<?, ?>) ReflectionUtils.newInstance(
                  table.getInputFormatClass(), jc);
              InputSplit dummySplit = new FileSplit(file.getPath(), 0, 0, new String[] { table
                  .getDataLocation().toString() });
              org.apache.hadoop.mapred.RecordReader<?, ?> recordReader =
                  (org.apache.hadoop.mapred.RecordReader<?, ?>) inputFormat
                      .getRecordReader(dummySplit, jc, Reporter.NULL);
              StatsProvidingRecordReader statsRR;
              if (recordReader instanceof StatsProvidingRecordReader) {
                statsRR = (StatsProvidingRecordReader) recordReader;
                numRows += statsRR.getStats().getRowCount();
                rawDataSize += statsRR.getStats().getRawDataSize();
                fileSize += file.getLen();
                numFiles += 1;
                statsAvailable = true;
              }
              recordReader.close();
            }
          }

          if (statsAvailable) {
            parameters.put(StatsSetupConst.ROW_COUNT, String.valueOf(numRows));
            parameters.put(StatsSetupConst.RAW_DATA_SIZE, String.valueOf(rawDataSize));
            parameters.put(StatsSetupConst.TOTAL_SIZE, String.valueOf(fileSize));
            parameters.put(StatsSetupConst.NUM_FILES, String.valueOf(numFiles));
            parameters.put(StatsSetupConst.STATS_GENERATED_VIA_STATS_TASK, StatsSetupConst.TRUE);

            db.alterTable(tableFullName, new Table(tTable));

            String msg = "Table " + tableFullName + " stats: [" + toString(parameters) + ']';
            LOG.debug(msg);
            console.printInfo(msg);
          } else {
            String msg = "Table " + tableFullName + " does not provide stats.";
            LOG.debug(msg);
          }
        } catch (Exception e) {
          console.printInfo("[Warning] could not update stats for " + tableFullName + ".",
              "Failed with exception " + e.getMessage() + "\n"
                  + StringUtils.stringifyException(e));
        }
      } else {

        // Partitioned table
        for (Partition partn : partitions) {
          threadPool.execute(new StatsCollection(partn));
        }

        LOG.debug("Stats collection waiting for threadpool to shutdown..");
        shutdownAndAwaitTermination(threadPool);
        LOG.debug("Stats collection threadpool shutdown successful.");

        ret = updatePartitions();
      }

    } catch (Exception e) {
      // Fail the query if the stats are supposed to be reliable
      // NOTE(review): the exception is otherwise swallowed without logging;
      // when statsReliable is false the failure is completely silent.
      if (work.isStatsReliable()) {
        ret = -1;
      }
    }

    // The return value of 0 indicates success,
    // anything else indicates failure
    return ret;
  }

  /**
   * Bulk-applies the per-partition updates collected by the worker threads.
   * A null entry marks a partition whose stats computation failed; with
   * statsReliable the whole task fails (-1).
   * NOTE(review): the else-branch passes the same values list — nulls
   * included — to alterPartitions when statsReliable is false; verify that
   * alterPartitions tolerates null elements.
   */
  private int updatePartitions() throws InvalidOperationException, HiveException {
    if (!partUpdates.isEmpty()) {
      List<Partition> updatedParts = Lists.newArrayList(partUpdates.values());
      if (updatedParts.contains(null) && work.isStatsReliable()) {
        LOG.debug("Stats requested to be reliable. Empty stats found and hence failing the task.");
        return -1;
      } else {
        LOG.debug("Bulk updating partitions..");
        db.alterPartitions(tableFullName, Lists.newArrayList(partUpdates.values()));
        LOG.debug("Bulk updated " + partUpdates.values().size() + " partitions.");
      }
    }
    return 0;
  }

  // Two-phase shutdown: drain gracefully, then cancel stragglers; re-assert
  // the interrupt flag if this thread itself is interrupted while waiting.
  private void shutdownAndAwaitTermination(ExecutorService threadPool) {

    // Disable new tasks from being submitted
    threadPool.shutdown();

    try {

      // Wait a while for existing tasks to terminate
      if (!threadPool.awaitTermination(100, TimeUnit.SECONDS)) {

        // Cancel currently executing tasks
        threadPool.shutdownNow();

        // Wait a while for tasks to respond to being cancelled
        if (!threadPool.awaitTermination(100, TimeUnit.SECONDS)) {
          LOG.debug("Stats collection thread pool did not terminate");
        }
      }
    } catch (InterruptedException ie) {

      // Cancel again if current thread also interrupted
      threadPool.shutdownNow();

      // Preserve interrupt status
      Thread.currentThread().interrupt();
    }
  }

  // Skips hidden files ("_..." and "....") when listing data files.
  private static final PathFilter hiddenFileFilter = new PathFilter() {
    public boolean accept(Path p) {
      String name = p.getName();
      return !name.startsWith("_") && !name.startsWith(".");
    }
  };

  // Renders the supported stat entries as "k=v, k=v" for logging.
  private String toString(Map<String, String> parameters) {
    StringBuilder builder = new StringBuilder();
    for (String statType : StatsSetupConst.supportedStats) {
      String value = parameters.get(statType);
      if (value != null) {
        if (builder.length() > 0) {
          builder.append(", ");
        }
        builder.append(statType).append('=').append(value);
      }
    }
    return builder.toString();
  }

  /**
   * Resolves the target from the table spec.
   *
   * @return the partition list for a partitioned table, or null for a
   *         non-partitioned table (or when no table spec is present)
   */
  private List<Partition> getPartitionsList() throws HiveException {
    if (work.getTableSpecs() != null) {
      tableSpec tblSpec = work.getTableSpecs();
      table = tblSpec.tableHandle;
      if (!table.isPartitioned()) {
        return null;
      } else {
        return tblSpec.partitions;
      }
    }
    return null;
  }
}
/*
 * ja, a Java-bytecode translator toolkit.
 * Copyright (C) 2004 Bill Burke. All Rights Reserved.
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License.  Alternatively, the contents of this file may be used under
 * the terms of the GNU Lesser General Public License Version 2.1 or later,
 * or the Apache License Version 2.0.
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 */

package ja.bytecode.annotation;

import ja.bytecode.ConstPool;
import ja.bytecode.Descriptor;
import ja.ClassPool;
import ja.CtClass;
import ja.CtMethod;
import ja.NotFoundException;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.Set;
import java.util.Iterator;

/**
 * The <code>annotation</code> structure.
 *
 * <p>An instance of this class is returned by
 * <code>getAnnotations()</code> in <code>AnnotationsAttribute</code>
 * or in <code>ParameterAnnotationsAttribute</code>.
 *
 * @see ja.bytecode.AnnotationsAttribute#getAnnotations()
 * @see ja.bytecode.ParameterAnnotationsAttribute#getAnnotations()
 * @see MemberValue
 * @see MemberValueVisitor
 * @see AnnotationsWriter
 *
 * @author <a href="mailto:bill@jboss.org">Bill Burke</a>
 * @author Shigeru Chiba
 * @author <a href="mailto:adrian@jboss.org">Adrian Brock</a>
 */
public class Annotation {
    // A single member entry: constant-pool index of the member name plus its value.
    static class Pair {
        int name;
        MemberValue value;
    }

    ConstPool pool;     // constant pool of the class file this annotation belongs to
    int typeIndex;      // constant-pool index of the annotation type's descriptor
    LinkedHashMap members;  // String member name -> Pair, in insertion order; null until first add

    /**
     * Constructs an annotation including no members.  A member can be
     * later added to the created annotation by <code>addMemberValue()</code>.
     *
     * @param type the index into the constant pool table.
     *             the entry at that index must be the
     *             <code>CONSTANT_Utf8_Info</code> structure
     *             representing the name of the annotation interface type.
     * @param cp the constant pool table.
     *
     * @see #addMemberValue(String, MemberValue)
     */
    public Annotation(int type, ConstPool cp) {
        pool = cp;
        typeIndex = type;
        members = null;
    }

    /**
     * Constructs an annotation including no members.  A member can be
     * later added to the created annotation by <code>addMemberValue()</code>.
     *
     * @param typeName the name of the annotation interface type.
     * @param cp the constant pool table.
     *
     * @see #addMemberValue(String, MemberValue)
     */
    public Annotation(String typeName, ConstPool cp) {
        this(cp.addUtf8Info(Descriptor.of(typeName)), cp);
    }

    /**
     * Constructs an annotation that can be accessed through the interface
     * represented by <code>clazz</code>.  The values of the members are
     * not specified.
     *
     * @param cp the constant pool table.
     * @param clazz the interface.
     * @throws NotFoundException when the clazz is not found
     */
    public Annotation(ConstPool cp, CtClass clazz)
        throws NotFoundException
    {
        // todo Enums are not supported right now.
        this(cp.addUtf8Info(Descriptor.of(clazz.getName())), cp);

        // NOTE: the type descriptor has already been interned in the pool by the
        // delegated constructor above before this validity check runs.
        if (!clazz.isInterface())
            throw new RuntimeException(
                "Only interfaces are allowed for Annotation creation.");

        CtMethod methods[] = clazz.getDeclaredMethods();
        if (methods.length > 0) {
            members = new LinkedHashMap();
        }

        // Pre-populate one (unspecified) member value per declared method.
        for (int i = 0; i < methods.length; i++) {
            CtClass returnType = methods[i].getReturnType();
            addMemberValue(methods[i].getName(),
                           createMemberValue(cp, returnType));
        }
    }

    /**
     * Makes an instance of <code>MemberValue</code>.
     *
     * @param cp the constant pool table.
     * @param type the type of the member.
* @return the member value * @throws NotFoundException when the type is not found */ public static MemberValue createMemberValue(ConstPool cp, CtClass type) throws NotFoundException { if (type == CtClass.booleanType) return new BooleanMemberValue(cp); else if (type == CtClass.byteType) return new ByteMemberValue(cp); else if (type == CtClass.charType) return new CharMemberValue(cp); else if (type == CtClass.shortType) return new ShortMemberValue(cp); else if (type == CtClass.intType) return new IntegerMemberValue(cp); else if (type == CtClass.longType) return new LongMemberValue(cp); else if (type == CtClass.floatType) return new FloatMemberValue(cp); else if (type == CtClass.doubleType) return new DoubleMemberValue(cp); else if (type.getName().equals("java.lang.Class")) return new ClassMemberValue(cp); else if (type.getName().equals("java.lang.String")) return new StringMemberValue(cp); else if (type.isArray()) { CtClass arrayType = type.getComponentType(); MemberValue member = createMemberValue(cp, arrayType); return new ArrayMemberValue(member, cp); } else if (type.isInterface()) { Annotation info = new Annotation(cp, type); return new AnnotationMemberValue(info, cp); } else { // treat as enum. I know this is not typed, // but JBoss has an Annotation Compiler for JDK 1.4 // and I want it to work with that. - Bill Burke EnumMemberValue emv = new EnumMemberValue(cp); emv.setType(type.getName()); return emv; } } /** * Adds a new member. * * @param nameIndex the index into the constant pool table. * The entry at that index must be * a <code>CONSTANT_Utf8_info</code> structure. * structure representing the member name. * @param value the member value. */ public void addMemberValue(int nameIndex, MemberValue value) { Pair p = new Pair(); p.name = nameIndex; p.value = value; addMemberValue(p); } /** * Adds a new member. * * @param name the member name. * @param value the member value. 
*/
    public void addMemberValue(String name, MemberValue value) {
        Pair p = new Pair();
        p.name = pool.addUtf8Info(name);
        p.value = value;
        if (members == null)
            members = new LinkedHashMap();

        members.put(name, p);
    }

    // Registers a pre-built pair under the member name recorded in the constant pool.
    private void addMemberValue(Pair pair) {
        String name = pool.getUtf8Info(pair.name);
        if (members == null)
            members = new LinkedHashMap();

        members.put(name, pair);
    }

    /**
     * Returns a string representation of the annotation, e.g.
     * <code>@Type(name=value, ...)</code>; the parentheses are omitted
     * when there are no members.
     */
    public String toString() {
        StringBuffer buf = new StringBuffer("@");
        buf.append(getTypeName());
        if (members != null) {
            buf.append("(");
            Iterator mit = members.keySet().iterator();
            while (mit.hasNext()) {
                String name = (String)mit.next();
                buf.append(name).append("=").append(getMemberValue(name));
                if (mit.hasNext())
                    buf.append(", ");
            }
            buf.append(")");
        }

        return buf.toString();
    }

    /**
     * Obtains the name of the annotation type.
     *
     * @return the type name
     */
    public String getTypeName() {
        return Descriptor.toClassName(pool.getUtf8Info(typeIndex));
    }

    /**
     * Obtains all the member names.
     *
     * @return null if no members are defined.
     */
    public Set getMemberNames() {
        if (members == null)
            return null;
        else
            return members.keySet();
    }

    /**
     * Obtains the member value with the given name.
     *
     * <p>If this annotation does not have a value for the
     * specified member,
     * this method returns null.  It does not return a
     * <code>MemberValue</code> with the default value.
     * The default value can be obtained from the annotation type.
     *
     * @param name the member name
     * @return null if the member cannot be found or if the value is
     * the default value.
     *
     * @see ja.bytecode.AnnotationDefaultAttribute
     */
    public MemberValue getMemberValue(String name) {
        if (members == null)
            return null;
        else {
            Pair p = (Pair)members.get(name);
            if (p == null)
                return null;
            else
                return p.value;
        }
    }

    /**
     * Constructs an annotation-type object representing this annotation.
     * For example, if this annotation represents <code>@Author</code>,
     * this method returns an <code>Author</code> object.
* * @param cl class loader for loading an annotation type. * @param cp class pool for obtaining class files. * @return the annotation * @throws ClassNotFoundException if the class cannot found. * @throws NoSuchClassError if the class linkage fails. */ public Object toAnnotationType(ClassLoader cl, ClassPool cp) throws ClassNotFoundException, NoSuchClassError { return AnnotationImpl.make(cl, MemberValue.loadClass(cl, getTypeName()), cp, this); } /** * Writes this annotation. * * @param writer the output. * @throws IOException for an error during the write */ public void write(AnnotationsWriter writer) throws IOException { String typeName = pool.getUtf8Info(typeIndex); if (members == null) { writer.annotation(typeName, 0); return; } writer.annotation(typeName, members.size()); Iterator it = members.values().iterator(); while (it.hasNext()) { Pair pair = (Pair)it.next(); writer.memberValuePair(pair.name); pair.value.write(writer); } } /** * Returns true if the given object represents the same annotation * as this object. The equality test checks the member values. */ public boolean equals(Object obj) { if (obj == this) return true; if (obj == null || obj instanceof Annotation == false) return false; Annotation other = (Annotation) obj; if (getTypeName().equals(other.getTypeName()) == false) return false; LinkedHashMap otherMembers = other.members; if (members == otherMembers) return true; else if (members == null) return otherMembers == null; else if (otherMembers == null) return false; else return members.equals(otherMembers); } }
package com.gamerecorder.fragment;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Set;

import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;

import com.gamerecorder.activity.R;
import com.gamerecorder.activity.VSHistoryDetailsActivity;
import com.gamerecorder.adapter.AbstractPinnedSectionListAdapter;
import com.gamerecorder.adapter.AbstractPinnedSectionListAdapter.Item;
import com.gamerecorder.db.dao.GameResultDao;
import com.gamerecorder.db.model.GameResult;
import com.gamerecorder.events.TeamVSHistoryChangeEvent;
import com.gamerecorder.interfaces.Identity;
import com.gamerecorder.interfaces.ListViewDelSelectedItemCallback;
import com.gamerecorder.util.Constants;
import com.gamerecorder.widget.ListViewActionMode;
import com.gamerecorder.widget.PinnedSectionListView;

import de.greenrobot.event.EventBus;

/**
 * Shows the head-to-head (VS) basketball history between two teams as a
 * pinned-section list grouped by game date. Clicking a game row opens
 * {@link VSHistoryDetailsActivity}; multi-select (contextual action mode)
 * supports deleting records.
 */
public class BasketballVSHistoryFragment extends Fragment implements ListViewDelSelectedItemCallback {

    private final static String TAG = "BasketballVSHistoryFragment";

    private PinnedSectionListView listView;
    private SectionListSimpleAdapter adapter;
    private List<Item> items;            // backing data: SECTION (date) rows interleaved with ITEM (game) rows
    private GameResultDao resultDao;
    // NOTE(review): SimpleDateFormat is not thread-safe; in the visible code it
    // is only used from the AsyncTask background thread — confirm no concurrent
    // use elsewhere in the file.
    private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.getDefault());

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // NOTE(review): registered here but unregistered in onStop(); after a
        // stop/start cycle the fragment no longer receives events — confirm the
        // intended lifecycle pairing (onStart/onStop or onCreate/onDestroy).
        EventBus.getDefault().register(this);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.fragment_basketball_vs_history, container, false);
        listView = (PinnedSectionListView) v
                .findViewById(R.id.team_vs_history_list);

        items = new ArrayList<Item>();
        adapter = new SectionListSimpleAdapter(getActivity(), R.layout.team_vs_history_list_item, items);
        listView.setAdapter(adapter);
        listView.setEmptyView(v.findViewById(R.id.team_vs_history_list_empty));
        listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE_MODAL);
        listView.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapter, View view, int position, long id) {
                // Game rows carry the record id in the view tag (set in getView);
                // section/date rows have no tag and are ignored.
                if (view instanceof LinearLayout && ((LinearLayout) view).getTag() != null) {
                    Log.d(TAG, ((LinearLayout) view).getTag().toString());
                    Intent intent = new Intent();
                    intent.putExtra(Constants.GAME_RESULT_ID,
                            Integer.valueOf(((LinearLayout) view).getTag().toString()));
                    intent.setClass(getActivity(), VSHistoryDetailsActivity.class);
                    startActivity(intent);
                }
            }
        });
        listView.setMultiChoiceModeListener(
                new ListViewActionMode<Item>(getActivity(), listView, adapter, this));
        return v;
    }

    // EventBus callback: reload the history whenever the selected team pairing changes.
    public void onEvent(TeamVSHistoryChangeEvent event) {
        new LoadGameResultAsyncTask(getActivity(), event.getTeams()).execute();
    }

    /** Renders either a pinned date header (SECTION) or a game row (ITEM). */
    private class SectionListSimpleAdapter extends AbstractPinnedSectionListAdapter {

        public SectionListSimpleAdapter(Context context, int resource, List<Item> items) {
            super(context, resource, items);
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            LinearLayout layout = null, dateLayout, dataLayout;
            Item item = getItem(position);
            if (convertView == null) {
                layout = (LinearLayout) mInflater.inflate(mResource, null);
            } else {
                layout = (LinearLayout) convertView;
            }
            dateLayout = (LinearLayout) layout.findViewById(R.id.date_label_container);
            dataLayout = (LinearLayout) layout.findViewById(R.id.data_label_container);
            if (item.getViewType() == Item.SECTION) {
                // Date header: show only the date label.
                dateLayout.setVisibility(View.VISIBLE);
                dataLayout.setVisibility(View.GONE);
                ((TextView) dateLayout.findViewById(R.id.start_date)).setText(item.getStartDate());
            } else {
                // Game row: show times/description and stash the record id in the
                // tag for the item-click listener to read back.
                dateLayout.setVisibility(View.GONE);
                dataLayout.setVisibility(View.VISIBLE);
                layout.setTag(item.getId());
                ((TextView) dataLayout.findViewById(R.id.start_time)).setText(item.getStartTime());
                ((TextView) dataLayout.findViewById(R.id.end_time)).setText(item.getEndTime());
                ((TextView) dataLayout.findViewById(R.id.data)).setText(item.getDesc());
            }
            return layout;
        }
    }

    /** Loads, filters, sorts and groups game results off the UI thread, behind a progress dialog. */
    private class LoadGameResultAsyncTask extends AsyncTask<Void, Void, Void> {

        private Context ctx = null;
        private ProgressDialog proDialog;
        private String[] teams;

        private LoadGameResultAsyncTask(Context ctx, String[] teams) {
            this.ctx = ctx;
            this.teams = teams;
        }

        @Override
        protected void onPreExecute() {
            proDialog = ProgressDialog.show(this.ctx,
                    this.ctx.getResources().getString(R.string.vs_history_loading_title),
                    this.ctx.getResources().getString(R.string.vs_history_loading_message));
        }

        @Override
        protected Void doInBackground(Void... params) {
            List<GameResult> results =
                    resultDao.queryByGameKind(ctx.getResources().getString(R.string.basketball_name_en));
            Set<String> startDateSet = new HashSet<String>();

            // Keep only finished games between the two selected teams.
            Iterator<GameResult> iter = results.iterator();
            while (iter.hasNext()) {
                GameResult result = iter.next();
                if (!result.isGamingTeams(this.teams) || result.getEndDate() == null) {
                    iter.remove();
                }
            }
            Collections.sort(results, new Comparator<GameResult>() {
                @Override
                public int compare(GameResult item1, GameResult item2) {
                    return item1.getStartDate().compareTo(item2.getStartDate());
                }
            });

            // NOTE(review): 'items' backs the adapter on the UI thread but is
            // mutated here on a background thread before notifyDataSetChanged()
            // runs in onPostExecute() — confirm this cannot race with rendering.
            items.clear();
            for (GameResult result : results) {
                Item item;
                String startDate = sdf.format(result.getStartDate()).split(" ")[0];
                if (!startDateSet.contains(startDate)) {
                    // First game on this date: insert a pinned section header.
                    item = new Item(Item.SECTION, result.getStartDate());
                    items.add(item);
                    startDateSet.add(startDate);
                }
                item = new Item(Item.ITEM, result.getId(), result.getHistoryRecordDesc(),
                        result.getStartDate(), result.getEndDate());
                items.add(item);
            }

            // remove the same team record
            if (new HashSet<String>(Arrays.asList(teams)).size() == 1) items.clear();
            return null;
        }

        @Override
        protected void onPostExecute(Void param) {
            adapter.notifyDataSetChanged();
            proDialog.dismiss();
        }
    }

    @Override
    public void deleteSelectedItems(List<Identity> selectedItems) {
        resultDao.delById(selectedItems);
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        resultDao = new GameResultDao(activity);
    }

    @Override
    public void onStop() {
        super.onStop();
        EventBus.getDefault().unregister(this);
    }
}
/*
 * Copyright (C) 2005-2012, International Business Machines Corporation and
 * others. All Rights Reserved.
 *
 * euc_tool
 *
 * This tool produces the character usage frequency statistics for the EUC family
 * of charsets, for use by the ICU charset detectors.
 *
 * usage:  java euc_tool [-d] [directory path]
 *
 *    -d:  Produce the data in a form to be exported to the ICU implementation.
 *         Default is to produce an informative dump.
 *
 *    directory path
 *         Source directory for the files to be analyzed.
 *         Default is the current directory.
 *         There should be three subdirectories under the specified directory, one
 *         each for EUC_JP, EUC_CN and EUC_KR.  Within each of these subdirectories
 *         should be text files in the specified encoding.
 */
package com.ibm.icu.dev.tool.charsetdet.mbcs;

import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;

public class EUCTool {

    // The file buffer and file data length need to be out in class member variables
    // so that the code lifted from charset detection for scanning the multi-byte
    // chars can see them conveniently.
    byte [] buf = new byte[1000000];   // whole input file is read into this buffer
    int fileSize;                      // number of valid bytes in buf

    boolean option_d = false;  // data option.  Produce exportable data
    boolean option_v = true;   // verbose informational output (default mode)

    public static void main(String[] args) {
        EUCTool This = new EUCTool();
        This.Main(args);
    }

    // Parse the command line, then run processDir() over every encoding
    // subdirectory of the target directory (skipping CVS bookkeeping dirs).
    void Main(String[] args) {
        int i;

        // Command line option handling.  "-d" flips to export mode; any other
        // "-" prefixed token is rejected; a bare token is the directory path.
        String dirName = ".";
        for (i=0; i<args.length; i++) {
            if (args[i].equals("-d")) {
                option_d = true;
                option_v = false;
                continue;
            }
            if (args[i].startsWith("-")) {
                System.err.println("Unrecongized option: " + args[i]);
                System.exit(-1);
            }
            dirName = args[i];
        }

        // Verify that the specified directory exists.
        File dir = new File(dirName);
        if (dir.isDirectory() == false) {
            System.err.println("\"" + dirName + "\" is not a directory");
            System.exit(-1);
        }

        // Do each subdirectory of the specified directory.  There should be
        // one per each encoding - euc-kr, euc-cn, euc-jp.
        File[] dirs = dir.listFiles();
        for (i=0; i<dirs.length; i++) {
            if (dirs[i].isDirectory()) {
                String nam = dirs[i].getName();
                if (nam.equalsIgnoreCase("CVS")) {
                    continue;
                }
                processDir(dirs[i]);
            }
        }
    }

    // Collect statistics from all ordinary files in a specified directory,
    // then dump them (verbose mode) or emit a Java array initializer (-d mode).
    void processDir(File dir) {
        int totalMbcsChars = 0;
        // Map from multi-byte char value -> ChEl holding its occurrence count.
        // NOTE(review): raw HashMap; keys and values are the same ChEl object.
        HashMap m = new HashMap(10000);
        int i;

        System.out.println(dir.getName());
        File[] files = dir.listFiles();
        for (i=0; i<files.length; i++) {
            FileInputStream is = null;
            try {
                if (files[i].isFile()) {
                    is = new FileInputStream(files[i]);
                    // NOTE(review): a single read() may return fewer bytes than
                    // the file holds; large files are silently truncated to 1 MB.
                    fileSize = is.read(buf);
                    if (option_v) {
                        System.out.println(files[i].getPath());
                        System.out.println(" " + fileSize + " bytes.");
                    }
                    iteratedChar ichar = new iteratedChar();
                    int fileChars = 0;
                    int fileMbcsChars = 0;
                    int errs = 0;

                    // Walk the file one logical EUC char at a time.
                    while (nextChar(ichar)) {
                        if (ichar.error == true) {
                            errs++;
                            continue;
                        }
                        fileChars++;
                        if (ichar.charValue > 255) {
                            fileMbcsChars++;
                            totalMbcsChars++;
                        }
                        if (ichar.charValue <= 255) {
                            // Don't keep occurence statistics for the single byte range
                            continue;
                        }

                        // Frequency-of-occurrence statistics are accumulated in the map.
                        ChEl keyEl = new ChEl(ichar.charValue, 0);
                        ChEl valEl = (ChEl)m.get(keyEl);
                        if (valEl == null) {
                            m.put(keyEl, keyEl);
                            valEl = keyEl;
                        }
                        valEl.occurences++;
                    }
                    if (option_v) {
                        System.out.println(" " + fileChars + " Chars");
                        System.out.println(" " + fileMbcsChars + " mbcs Chars");
                        System.out.println(" " + errs + " errors");
                        System.out.println("\n");
                    }
                }
            }
            catch (Exception e) {
                System.err.println("Exception:" + e);
            }
            finally {
                if (is != null) {
                    try {
                        is.close();
                    } catch (Exception e) {
                        // ignore close failures
                    }
                }
            }
        }

        // We've processed through all of the files.
        // Sort (most frequent first, per ChEl.compareTo) and dump the stats.
        Object [] encounteredChars = m.values().toArray();
        Arrays.sort(encounteredChars);
        int cumulativeChars = 0;
        int cumulativePercent = 0;
        if (option_v) {
            System.out.println("# <char code> <occurences> <Cumulative %>");
            for (i=0; i<encounteredChars.length; i++) {
                ChEl c = (ChEl)encounteredChars[i];
                cumulativeChars += c.occurences;
                cumulativePercent = cumulativeChars*100/totalMbcsChars;
                System.out.println(i + " " + Integer.toHexString(c.charCode) + " " + c.occurences + " " + cumulativePercent);
            }
        }
        if (option_d) {
            // Output the list of characters formatted for pasting into a
            // Java source code array initializer.
            // Resort into order based on the character code value, not
            // on frequency of occurence.
            // NOTE(review): loop bound is only "i<100 && cumulativePercent<50";
            // if fewer than 100 distinct mbcs chars were seen this can throw
            // ArrayIndexOutOfBoundsException — should also check
            // i < encounteredChars.length.
            List charList = new ArrayList();
            for (i=0; i<100 && cumulativePercent<50; i++) {
                ChEl c = (ChEl)encounteredChars[i];
                cumulativeChars += c.occurences;
                cumulativePercent = cumulativeChars*100/totalMbcsChars;
                charList.add(new Integer(c.charCode));
            }
            Object [] sortedChars = charList.toArray();
            Arrays.sort(sortedChars);
            System.out.print(" {");
            for (i=0; i<sortedChars.length; i++) {
                if (i != 0) {
                    System.out.print(", ");
                    if ((i)%10 == 0) {
                        System.out.print("\n ");
                    }
                }
                int cp = ((Integer)sortedChars[i]).intValue();
                System.out.print("0x" + Integer.toHexString(cp));
            }
            System.out.println("};");
        }
    }

    // This is a little class containing a multi-byte character value and an
    // occurence count for that char.  Instances are kept in the collection
    // that accumulates statistics.
    //
    // WARNING: this class's natural ordering (from Comparable) and equals()
    // are inconsistent: equals() compares charCode while compareTo() compares
    // occurence counts (descending).  Safe here because it is only used as a
    // HashMap key and for a frequency sort, never in a sorted collection.
    static class ChEl implements Comparable {
        int charCode;    // the multi-byte character value
        int occurences;  // how many times it was seen

        ChEl(int c, int o) {
            charCode = c;
            occurences = o;
        }

        // Equals needs to work with a map, with the charCode as the key.
        // For insertion/lookup, we care about the char code only, not the occurence count.
        public boolean equals(Object other) {
            ChEl o = (ChEl)other;
            return o.charCode == this.charCode;
        }

        // Hashcode needs to be compatible with equals; we're using this in a HashMap.
        public int hashCode() {
            return charCode;
        }

        // We want to be able to sort the results by frequency of occurence.
        // Compare backwards: most frequent chars sort first.
        public int compareTo(Object other) {
            ChEl o = (ChEl)other;
            return (this.occurences> o.occurences? -1 : (this.occurences==o.occurences? 0 : 1));
        }
    }

    // iteratedChar is copied and slightly hacked from the similar class in
    // CharsetRecog_mbcs.  Pulls out one logical char according to the rules
    // of EUC encoding.
    class iteratedChar {
        int charValue = 0;        // bytes of the char packed into an int; its
                                  // meaning varies between encodings
        int index = 0;            // buf offset where the current char started
        int nextIndex = 0;        // buf offset of the next unread byte
        boolean error = false;    // current char was malformed
        boolean done = false;     // ran off the end of the input

        void reset() {
            charValue = 0;
            index = -1;
            nextIndex = 0;
            error = false;
            done = false;
        }

        // Return the next input byte as 0..255, or -1 (and set done) at EOF.
        int nextByte() {
            if (nextIndex >= fileSize) {
                done = true;
                return -1;
            }
            int byteValue = (int)buf[nextIndex++] & 0x00ff;
            return byteValue;
        }
    }

    // Decode one logical EUC character from the input into it.charValue.
    // Returns false when the end of the input has been reached.
    boolean nextChar(iteratedChar it) {
        it.index = it.nextIndex;
        it.error = false;
        int firstByte = 0;
        int secondByte = 0;
        int thirdByte = 0;
        // NOTE(review): fourthByte is never assigned anywhere below; it is
        // always printed as 0 in the error message.
        int fourthByte = 0;

        buildChar: {
            firstByte = it.charValue = it.nextByte();
            if (firstByte < 0) {
                // Ran off the end of the input data
                it.done = true;
                break buildChar;
            }
            if (firstByte <= 0x8d) {
                // single byte char
                break buildChar;
            }

            secondByte = it.nextByte();
            it.charValue = (it.charValue << 8) | secondByte;

            if (firstByte >= 0xA1 && firstByte <= 0xfe) {
                // Two byte Char; trailing byte must be in the 0xa1..0xfe range.
                if (secondByte < 0xa1) {
                    it.error = true;
                }
                break buildChar;
            }
            if (firstByte == 0x8e) {
                // Code Set 2.
                // In EUC-JP, total char size is 2 bytes, only one byte of actual char value.
                // In EUC-TW, total char size is 4 bytes, three bytes contribute to char value.
                // We don't know which we've got.
                // Treat it like EUC-JP.  If the data really was EUC-TW, the following two
                // bytes will look like a well formed 2 byte char.
                if (secondByte < 0xa1) {
                    it.error = true;
                }
                break buildChar;
            }
            if (firstByte == 0x8f) {
                // Code set 3.
                // Three byte total char size, two bytes of actual char value.
                thirdByte = it.nextByte();
                it.charValue = (it.charValue << 8) | thirdByte;
                if (thirdByte < 0xa1) {
                    it.error = true;
                }
            }
        }

        if (it.error) {
            System.out.println("Error " + Integer.toHexString(firstByte) + " "
                    + Integer.toHexString(secondByte) + " "
                    + Integer.toHexString(thirdByte) + " "
                    + Integer.toHexString(fourthByte));
        }
        return (it.done == false);
    }
}
package com.gschat;

// NOTE(review): Device, KV and ByteBuffer are imported but unused in this
// class; this file looks machine-generated by the gsrpc toolchain, so manual
// edits may be overwritten on regeneration.
import com.gsrpc.Device;
import com.gsrpc.KV;

import java.nio.ByteBuffer;

import com.gsrpc.Writer;
import com.gsrpc.Reader;

/**
 * Chat mail message with gsRPC wire (de)serialization.
 * Fields are written/read in a fixed order: ID, SQID, TS, sender, receiver,
 * type, content, attachments, extension — marshal() and unmarshal() must stay
 * in lock-step with each other and with the peer's generated code.
 */
public class Mail {

    private long iD = 0;                                 // message id
    private int sQID = 0;                                // sequence id
    private long tS = 0;                                 // timestamp
    private String sender = "";
    private String receiver = "";
    private MailType type = MailType.Single;
    private String content = "";
    private Attachment[] attachments = new Attachment[0];
    private byte[] extension = new byte[0];              // opaque extension blob

    public Mail(){
    }

    public Mail(long iD, int sQID, long tS, String sender, String receiver, MailType type, String content, Attachment[] attachments, byte[] extension ) {
        this.iD = iD;
        this.sQID = sQID;
        this.tS = tS;
        this.sender = sender;
        this.receiver = receiver;
        this.type = type;
        this.content = content;
        this.attachments = attachments;
        this.extension = extension;
    }

    public long getID() {
        return this.iD;
    }

    public void setID(long arg) {
        this.iD = arg;
    }

    public int getSQID() {
        return this.sQID;
    }

    public void setSQID(int arg) {
        this.sQID = arg;
    }

    public long getTS() {
        return this.tS;
    }

    public void setTS(long arg) {
        this.tS = arg;
    }

    public String getSender() {
        return this.sender;
    }

    public void setSender(String arg) {
        this.sender = arg;
    }

    public String getReceiver() {
        return this.receiver;
    }

    public void setReceiver(String arg) {
        this.receiver = arg;
    }

    public MailType getType() {
        return this.type;
    }

    public void setType(MailType arg) {
        this.type = arg;
    }

    public String getContent() {
        return this.content;
    }

    public void setContent(String arg) {
        this.content = arg;
    }

    public Attachment[] getAttachments() {
        return this.attachments;
    }

    public void setAttachments(Attachment[] arg) {
        this.attachments = arg;
    }

    public byte[] getExtension() {
        return this.extension;
    }

    public void setExtension(byte[] arg) {
        this.extension = arg;
    }

    /**
     * Serialize this message: a leading field count (9), then each field
     * prefixed by its gsRPC type tag.
     *
     * @param writer destination wire writer
     * @throws Exception propagated from the underlying writer
     */
    public void marshal(Writer writer) throws Exception {
        writer.writeByte((byte)9);  // number of fields that follow

        writer.writeByte((byte)com.gsrpc.Tag.I64.getValue());
        writer.writeUInt64(iD);

        writer.writeByte((byte)com.gsrpc.Tag.I32.getValue());
        writer.writeUInt32(sQID);

        writer.writeByte((byte)com.gsrpc.Tag.I64.getValue());
        writer.writeUInt64(tS);

        writer.writeByte((byte)com.gsrpc.Tag.String.getValue());
        writer.writeString(sender);

        writer.writeByte((byte)com.gsrpc.Tag.String.getValue());
        writer.writeString(receiver);

        writer.writeByte((byte)com.gsrpc.Tag.I8.getValue());
        type.marshal(writer);

        writer.writeByte((byte)com.gsrpc.Tag.String.getValue());
        writer.writeString(content);

        // List-of-table tag: element tag packed into the high nibble.
        writer.writeByte((byte)((com.gsrpc.Tag.Table.getValue() << 4)|com.gsrpc.Tag.List.getValue()));
        writer.writeUInt16((short)attachments.length);
        for(Attachment v3 : attachments){
            v3.marshal(writer);
        }

        // List-of-i8 tag == raw byte array.
        writer.writeByte((byte)((com.gsrpc.Tag.I8.getValue() << 4)|com.gsrpc.Tag.List.getValue()));
        writer.writeBytes(extension);
    }

    /**
     * Deserialize in the same field order as marshal().  The leading byte is
     * the sender's field count; reading stops early once that many fields have
     * been consumed (forward compatibility with shorter peers), and any extra
     * fields from a newer peer are skipped at the end.
     *
     * @param reader source wire reader
     * @throws Exception propagated from the underlying reader
     */
    public void unmarshal(Reader reader) throws Exception {
        byte __fields = reader.readByte();  // remaining field count

        {
            byte tag = reader.readByte();
            if(tag != com.gsrpc.Tag.Skip.getValue()) {
                iD = reader.readUInt64();
            }
            if(-- __fields == 0) {
                return;
            }
        }

        {
            byte tag = reader.readByte();
            if(tag != com.gsrpc.Tag.Skip.getValue()) {
                sQID = reader.readUInt32();
            }
            if(-- __fields == 0) {
                return;
            }
        }

        {
            byte tag = reader.readByte();
            if(tag != com.gsrpc.Tag.Skip.getValue()) {
                tS = reader.readUInt64();
            }
            if(-- __fields == 0) {
                return;
            }
        }

        {
            byte tag = reader.readByte();
            if(tag != com.gsrpc.Tag.Skip.getValue()) {
                sender = reader.readString();
            }
            if(-- __fields == 0) {
                return;
            }
        }

        {
            byte tag = reader.readByte();
            if(tag != com.gsrpc.Tag.Skip.getValue()) {
                receiver = reader.readString();
            }
            if(-- __fields == 0) {
                return;
            }
        }

        {
            byte tag = reader.readByte();
            if(tag != com.gsrpc.Tag.Skip.getValue()) {
                type = MailType.unmarshal(reader);
            }
            if(-- __fields == 0) {
                return;
            }
        }

        {
            byte tag = reader.readByte();
            if(tag != com.gsrpc.Tag.Skip.getValue()) {
                content = reader.readString();
            }
            if(-- __fields == 0) {
                return;
            }
        }

        {
            byte tag = reader.readByte();
            if(tag != com.gsrpc.Tag.Skip.getValue()) {
                // Length-prefixed list of Attachment tables.
                int max3 = reader.readUInt16();
                attachments = new Attachment[max3];
                for(int i3 = 0; i3 < max3; i3 ++ ){
                    Attachment v3 = new Attachment();
                    v3.unmarshal(reader);
                    attachments[i3] = v3;
                }
            }
            if(-- __fields == 0) {
                return;
            }
        }

        {
            byte tag = reader.readByte();
            if(tag != com.gsrpc.Tag.Skip.getValue()) {
                extension = reader.readBytes();
            }
            if(-- __fields == 0) {
                return;
            }
        }

        // Skip any trailing fields a newer sender appended.
        for(int i = 0; i < (int)__fields; i ++) {
            byte tag = reader.readByte();
            if (tag == com.gsrpc.Tag.Skip.getValue()) {
                continue;
            }
            reader.readSkip(tag);
        }
    }
}
/*
 * Copyright 2015 OpenCB
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.opencb.cellbase.server.ws;

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.module.jsonSchema.JsonSchema;
import com.fasterxml.jackson.module.jsonSchema.factories.SchemaFactoryWrapper;
import com.google.common.base.Splitter;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import org.opencb.cellbase.core.CellBaseConfiguration;
import org.opencb.cellbase.core.db.DBAdaptorFactory;
import org.opencb.cellbase.server.exception.SpeciesException;
import org.opencb.cellbase.server.exception.VersionException;
import org.opencb.commons.datastore.core.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.*;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.*;
import javax.ws.rs.core.Response.ResponseBuilder;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.*;

/**
 * Base JAX-RS resource for all CellBase REST web services.  Parses the common
 * path/query parameters (version, species, include/exclude, limit/skip, ...),
 * loads the static configuration and DB adaptor factory once per JVM, and
 * provides the shared OK/error response builders.
 */
@Path("/{version}/{species}")
@Produces("text/plain")
//@Api(value = "Generic", description = "Generic RESTful Web Services API")
public class GenericRestWSServer implements IWSServer {

    // Injected via the constructor's @PathParam, not field injection.
    // @DefaultValue("")
    // @PathParam("version")
    // @ApiParam(name = "version", value = "Use 'latest' for last stable version", defaultValue = "latest")
    protected String version;

    // @DefaultValue("")
    // @PathParam("species")
    // @ApiParam(name = "species", value = "Name of the species, e.g.: hsapiens.")
    protected String species;

    @ApiParam(name = "genome assembly", value = "Set the reference genome assembly, e.g. grch38. For a full list of"
            + "potentially available assemblies, please refer to: "
            + "http://bioinfo.hpc.cam.ac.uk/cellbase/webservices/rest/latest/meta/species")
    @DefaultValue("")
    @QueryParam("assembly")
    protected String assembly;

    @ApiParam(name = "exclude", value = "Set which fields are excluded in the response, e.g.: transcripts.exons. "
            + " Please note that this option may not be enabled for all web services.")
    @DefaultValue("")
    @QueryParam("exclude")
    protected String exclude;

    @DefaultValue("")
    @QueryParam("include")
    @ApiParam(name = "include", value = "Set which fields are included in the response, e.g.: transcripts.id. "
            + " Please note that this parameter may not be enabled for all web services.")
    protected String include;

    @DefaultValue("-1")
    @QueryParam("limit")
    @ApiParam(name = "limit", value = "Max number of results to be returned. No limit applied when -1."
            + " Please note that this option may not be available for all web services.")
    protected int limit;

    @DefaultValue("-1")
    @QueryParam("skip")
    @ApiParam(name = "skip", value = "Number of results to be skipped. No skip applied when -1. "
            + " Please note that this option may not be available for all web services.")
    protected int skip;

    @DefaultValue("false")
    @QueryParam("count")
    @ApiParam(name = "count", value = "Get a count of the number of results obtained. Deactivated by default. "
            + " Please note that this option may not be available for all web services.",
            defaultValue = "false", allowableValues = "false,true")
    protected String count;

    @DefaultValue("json")
    @QueryParam("of")
    @ApiParam(name = "Output format", value = "Output format, Protobuf is not yet implemented", defaultValue = "json",
            allowableValues = "json,pb (Not implemented yet)")
    protected String outputFormat;

    protected Query query;                 // per-request query terms
    protected QueryOptions queryOptions;   // per-request projection/paging options
    protected QueryResponse queryResponse; // response envelope under construction

    protected UriInfo uriInfo;
    protected HttpServletRequest httpServletRequest;

    // Jackson mapper/writer are thread-safe and shared across all requests.
    protected static ObjectMapper jsonObjectMapper;
    protected static ObjectWriter jsonObjectWriter;

    protected long startTime;  // request start, used for response timing
    protected long endTime;

    protected static Logger logger;

    /**
     * Loading properties file just one time to be more efficient. All methods
     * will check parameters so to avoid extra operations this config can load
     * versions and species
     */
    protected static CellBaseConfiguration cellBaseConfiguration; //= new CellBaseConfiguration()

    /**
     * DBAdaptorFactory creation, this object can be initialize with an
     * HibernateDBAdaptorFactory or an HBaseDBAdaptorFactory. This object is a
     * factory for creating adaptors like GeneDBAdaptor
     */
    protected static DBAdaptorFactory dbAdaptorFactory;
    protected static org.opencb.cellbase.core.api.DBAdaptorFactory dbAdaptorFactory2;

    private static final int LIMIT_DEFAULT = 1000;  // applied when client passes no limit
    private static final int LIMIT_MAX = 5000;      // hard cap on client-requested limit

    static {
        logger = LoggerFactory.getLogger("org.opencb.cellbase.server.ws.GenericRestWSServer");
        logger.info("Static block, creating MongoDBAdapatorFactory");
        try {
            // Prefer $CELLBASE_HOME/configuration.json, fall back to the
            // configuration.json bundled on the classpath.
            if (System.getenv("CELLBASE_HOME") != null) {
                logger.info("Loading configuration from '{}'", System.getenv("CELLBASE_HOME") + "/configuration.json");
                cellBaseConfiguration = CellBaseConfiguration
                        .load(new FileInputStream(new File(System.getenv("CELLBASE_HOME") + "/configuration.json")));
            } else {
                logger.info("Loading configuration from '{}'",
                        CellBaseConfiguration.class.getClassLoader().getResourceAsStream("configuration.json").toString());
                cellBaseConfiguration = CellBaseConfiguration
                        .load(CellBaseConfiguration.class.getClassLoader().getResourceAsStream("configuration.json"));
            }

            // If Configuration has been loaded we can create the DBAdaptorFactory
            // dbAdaptorFactory = new MongoDBAdaptorFactory(cellBaseConfiguration);
            dbAdaptorFactory2 = new org.opencb.cellbase.mongodb.impl.MongoDBAdaptorFactory(cellBaseConfiguration);
        } catch (IOException e) {
            e.printStackTrace();
        }

        jsonObjectMapper = new ObjectMapper();
        jsonObjectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
        // jsonObjectMapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
        jsonObjectMapper.configure(MapperFeature.REQUIRE_SETTERS_FOR_GETTERS, true);
        jsonObjectWriter = jsonObjectMapper.writer();
    }

    /**
     * Constructor for endpoints that are not species-scoped; species checks
     * are skipped.
     */
    public GenericRestWSServer(@PathParam("version") String version, @Context UriInfo uriInfo,
                               @Context HttpServletRequest hsr) throws VersionException, SpeciesException {
        this.version = version;
        this.uriInfo = uriInfo;
        this.httpServletRequest = hsr;

        logger.debug("Executing GenericRestWSServer constructor with no Species");
        init(false);
    }

    /**
     * Constructor for species-scoped endpoints; validates both version and
     * species path parameters.
     */
    public GenericRestWSServer(@PathParam("version") String version, @PathParam("species") String species,
                               @Context UriInfo uriInfo, @Context HttpServletRequest hsr)
            throws VersionException, SpeciesException {
        this.version = version;
        this.species = species;
        this.uriInfo = uriInfo;
        this.httpServletRequest = hsr;

        logger.debug("Executing GenericRestWSServer constructor");
        init(true);
    }

    // Reset the per-request state and validate the path parameters.
    protected void init(boolean checkSpecies) throws VersionException, SpeciesException {
        startTime = System.currentTimeMillis();
        query = new Query();
        // This needs to be an ArrayList since it may be added some extra fields later
        queryOptions = new QueryOptions("exclude", new ArrayList<>(Arrays.asList("_id", "_chunkIds")));
        queryResponse = new QueryResponse();

        checkPathParams(checkSpecies);
    }

    private void checkPathParams(boolean checkSpecies) throws VersionException, SpeciesException {
        if (version == null) {
            throw new VersionException("Version not valid: '" + version + "'");
        }

        if (checkSpecies && species == null) {
            throw new SpeciesException("Species not valid: '" + species + "'");
        }

        /**
         * Check version parameter, must be: v1, v2, ... If 'latest' then is
         * converted to appropriate version
         */
        if (version.equalsIgnoreCase("latest")) {
            version = cellBaseConfiguration.getVersion();
            logger.info("Version 'latest' detected, setting version parameter to '{}'", version);
        } else {
            // FIXME this will only work when no database schemas are done, in version 3 and 4 this can raise some problems
            // we set the version from the URL, this will decide which database is queried,
            cellBaseConfiguration.setVersion(version);
        }

        if (!version.equalsIgnoreCase("v3") && !cellBaseConfiguration.getVersion().equalsIgnoreCase(this.version)) {
            logger.error("Version '{}' does not match configuration '{}'", this.version, cellBaseConfiguration.getVersion());
            throw new VersionException("Version not valid: '" + version + "'");
        }
    }

    /**
     * Copies the URL query parameters into this.queryOptions / this.query.
     * Recognized options (metadata, exclude, include, limit, skip, count) are
     * normalized; everything else is passed through verbatim.
     */
    @Override
    public void parseQueryParams() {
        MultivaluedMap<String, String> multivaluedMap = uriInfo.getQueryParameters();

        queryOptions.put("metadata", multivaluedMap.get("metadata") == null || multivaluedMap.get("metadata").get(0).equals("true"));

        if (exclude != null && !exclude.isEmpty()) {
            // We add the user's 'exclude' fields to the default values _id and _chunks.
            // NOTE(review): this relies on getAsStringList returning the live
            // backing list; if it returns a converted copy the user's exclude
            // fields are silently dropped — verify against QueryOptions.
            if (queryOptions.containsKey("exclude")) {
                queryOptions.getAsStringList("exclude").addAll(Splitter.on(",").splitToList(exclude));
            }
        }
        // else {
        //     queryOptions.put("exclude", (multivaluedMap.get("exclude") != null)
        //             ? Splitter.on(",").splitToList(multivaluedMap.get("exclude").get(0))
        //             : null);
        // }

        if (include != null && !include.isEmpty()) {
            queryOptions.put("include", new LinkedList<>(Splitter.on(",").splitToList(include)));
        } else {
            queryOptions.put("include", (multivaluedMap.get("include") != null)
                    ? Splitter.on(",").splitToList(multivaluedMap.get("include").get(0))
                    : null);
        }

        // Clamp the limit to LIMIT_MAX, defaulting to LIMIT_DEFAULT.
        queryOptions.put("limit", (limit > 0) ? Math.min(limit, LIMIT_MAX) : LIMIT_DEFAULT);
        queryOptions.put("skip", (skip >= 0) ? skip : -1);
        queryOptions.put("count", (count != null && !count.equals("")) && Boolean.parseBoolean(count));
        // outputFormat = (outputFormat != null && !outputFormat.equals("")) ? outputFormat : "json";

        // Add all the others QueryParams from the URL
        for (Map.Entry<String, List<String>> entry : multivaluedMap.entrySet()) {
            if (!queryOptions.containsKey(entry.getKey())) {
                // logger.info("Adding '{}' to queryOptions", entry);
                // FIXME delete this!!
                queryOptions.put(entry.getKey(), entry.getValue().get(0));
                query.put(entry.getKey(), entry.getValue().get(0));
            }
        }
    }

    @GET
    @Path("/help")
    @ApiOperation(httpMethod = "GET", value = "To be implemented", response = QueryResponse.class, hidden = true)
    public Response help() {
        return createOkResponse("No help available");
    }

    // Root of the species path; only the magic species "echo" is handled and
    // acts as a liveness probe.
    @GET
    public Response defaultMethod() {
        switch (species) {
            case "echo":
                return createStringResponse("Status active");
            default:
                break;
        }
        return createOkResponse("Not valid option");
    }

    // Generate and return the JSON schema of the given model class.
    protected Response createModelResponse(Class clazz) {
        try {
            ObjectMapper mapper = new ObjectMapper();
            SchemaFactoryWrapper visitor = new SchemaFactoryWrapper();
            mapper.acceptJsonFormatVisitor(mapper.constructType(clazz), visitor);
            JsonSchema jsonSchema = visitor.finalSchema();

            return createOkResponse(jsonSchema);
        } catch (Exception e) {
            return createErrorResponse(e);
        }
    }

    // Wrap an exception into a 500 response with the standard envelope.
    protected Response createErrorResponse(Exception e) {
        // First we print the exception in Server logs
        e.printStackTrace();

        // Now we prepare the response to client
        queryResponse = new QueryResponse();
        queryResponse.setTime(new Long(System.currentTimeMillis() - startTime).intValue());
        queryResponse.setApiVersion(version);
        queryResponse.setQueryOptions(queryOptions);
        queryResponse.setError(e.toString());

        QueryResult<ObjectMap> result = new QueryResult();
        result.setWarningMsg("Future errors will ONLY be shown in the QueryResponse body");
        result.setErrorMsg("DEPRECATED: " + e.toString());
        queryResponse.setResponse(Arrays.asList(result));

        return Response.fromResponse(createJsonResponse(queryResponse))
                .status(Response.Status.INTERNAL_SERVER_ERROR)
                .build();
    }

    protected Response createErrorResponse(String method, String errorMessage) {
        try {
            // NOTE(review): HashMap.put() returns the PREVIOUS value (null for
            // a fresh map), so this serializes the string "null" rather than
            // the {"[ERROR] method": message} map that was intended.
            return buildResponse(Response.ok(jsonObjectWriter.writeValueAsString(new HashMap<>().put("[ERROR] " + method, errorMessage)), MediaType.APPLICATION_JSON_TYPE));
        } catch (Exception e) {
            return createErrorResponse(e);
        }
    }

    // Wrap any result object into the standard QueryResponse envelope.
    protected Response createOkResponse(Object obj) {
        queryResponse = new QueryResponse();
        queryResponse.setTime(new Long(System.currentTimeMillis() - startTime).intValue());
        queryResponse.setApiVersion(version);
        queryResponse.setQueryOptions(queryOptions);

        // Guarantee that the QueryResponse object contains a list of results
        List list;
        if (obj instanceof List) {
            list = (List) obj;
        } else {
            list = new ArrayList(1);
            list.add(obj);
        }
        queryResponse.setResponse(list);

        return createJsonResponse(queryResponse);
    }

    protected Response createOkResponse(Object obj, MediaType mediaType) {
        return buildResponse(Response.ok(obj, mediaType));
    }

    protected Response createOkResponse(Object obj, MediaType mediaType, String fileName) {
        return buildResponse(Response.ok(obj, mediaType).header("content-disposition", "attachment; filename =" + fileName));
    }

    protected Response createStringResponse(String str) {
        return buildResponse(Response.ok(str));
    }

    // Serialize the envelope to JSON; on serialization failure fall back to a
    // plain-text error response.
    protected Response createJsonResponse(QueryResponse queryResponse) {
        try {
            return buildResponse(Response.ok(jsonObjectWriter.writeValueAsString(queryResponse), MediaType.APPLICATION_JSON_TYPE));
        } catch (JsonProcessingException e) {
            e.printStackTrace();
            logger.error("Error parsing queryResponse object");
            return createErrorResponse("", "Error parsing QueryResponse object:\n" + Arrays.toString(e.getStackTrace()));
        }
    }

    // Attach the permissive CORS headers every CellBase response carries.
    private Response buildResponse(ResponseBuilder responseBuilder) {
        return responseBuilder
                .header("Access-Control-Allow-Origin", "*")
                .header("Access-Control-Allow-Headers", "x-requested-with, content-type")
                .header("Access-Control-Allow-Credentials", "true")
                .header("Access-Control-Allow-Methods", "GET, POST, OPTIONS")
                .build();
    }

    /*
     * TO DELETE
     */
    @Deprecated
    protected Response generateResponse(String queryString, List features) throws IOException {
        return createOkResponse("TODO: generateResponse is deprecated");
    }

    @Deprecated
    protected Response generateResponse(String queryString, String headerTag, List features) throws IOException {
        return createOkResponse("TODO: generateResponse is deprecated");
    }

    @Deprecated
    private boolean isSpecieAvailable(String species) {
        List<CellBaseConfiguration.SpeciesProperties.Species> speciesList = cellBaseConfiguration.getAllSpecies();
        for (int i = 0; i < speciesList.size(); i++) {
            // This only allows to show the information if species is in 3
            // letters format
            if (species.equalsIgnoreCase(speciesList.get(i).getId())) {
                return true;
            }
        }
        return false;
    }

    // protected List<Query> createQueries(String csvField, String queryKey) {
    //     String[] ids = csvField.split(",");
    //     List<Query> queries = new ArrayList<>(ids.length);
    //     for (String s : ids) {
    //         queries.add(new Query(queryKey, s));
    //     }
    //     return queries;
    // }

    /**
     * Build one Query per comma-separated id in csvField, each mapped under
     * queryKey.  Optional trailing args are key/value pairs copied into every
     * query (ignored unless their count is even).
     */
    protected List<Query> createQueries(String csvField, String queryKey, String... args) {
        String[] ids = csvField.split(",");
        List<Query> queries = new ArrayList<>(ids.length);
        for (String s : ids) {
            Query query = new Query(queryKey, s);
            if (args != null && args.length > 0 && args.length % 2 == 0) {
                for (int i = 0; i < args.length; i += 2) {
                    query.put(args[i], args[i + 1]);
                }
            }
            queries.add(query);
        }
        return queries;
    }
}
package com.marshalchen.common.ui;

import android.app.Activity;
import android.content.Context;
import android.os.SystemClock;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.MotionEvent;
import android.view.ViewGroup;
import android.view.animation.Interpolator;
import android.widget.AbsListView;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.ListView;

/**
 * ListView with a header image that zooms when the list is over-scrolled at
 * the top ("pull to zoom") and springs back via ScalingRunnalable on release.
 *
 * NOTE(review): this looks like decompiled code — raw int MotionEvent action
 * values, dead locals, and the `endScraling`/`Runnalable` spellings.  Touch
 * handling is order-sensitive, so the logic is documented here rather than
 * restructured.
 */
public class PullToZoomListView extends ListView implements
        AbsListView.OnScrollListener {
    private static final int INVALID_VALUE = -1;
    private static final String TAG = "PullToZoomListView";

    // Ease-out quintic: fast start, slow settle for the spring-back.
    private static final Interpolator sInterpolator = new Interpolator() {
        public float getInterpolation(float paramAnonymousFloat) {
            float f = paramAnonymousFloat - 1.0F;
            return 1.0F + f * (f * (f * (f * f)));
        }
    };

    int mActivePointerId = -1;             // pointer currently driving the zoom
    private FrameLayout mHeaderContainer;  // header view holding image + shadow
    private int mHeaderHeight;             // resting (un-zoomed) header height
    private ImageView mHeaderImage;
    float mLastMotionY = -1.0F;            // last tracked Y, -1 = untracked
    float mLastScale = -1.0F;              // header scale at last event
    float mMaxScale = -1.0F;               // zoom cap (screen/header ratio)
    private OnScrollListener mOnScrollListener;  // client listener we forward to
    private ScalingRunnalable mScalingRunnalable;
    private int mScreenHeight;
    private ImageView mShadow;             // gradient overlay at header bottom

    public PullToZoomListView(Context paramContext) {
        super(paramContext);
        init(paramContext);
    }

    public PullToZoomListView(Context paramContext,
                              AttributeSet paramAttributeSet) {
        super(paramContext, paramAttributeSet);
        init(paramContext);
    }

    public PullToZoomListView(Context paramContext,
                              AttributeSet paramAttributeSet, int paramInt) {
        super(paramContext, paramAttributeSet, paramInt);
        init(paramContext);
    }

    // Kick off the spring-back animation after the finger lifts.
    private void endScraling() {
        if (this.mHeaderContainer.getBottom() >= this.mHeaderHeight)
            Log.d("mmm", "endScraling");
        this.mScalingRunnalable.startAnimation(200L);
    }

    private void init(Context paramContext) {
        // NOTE(review): cast assumes the Context is always an Activity.
        DisplayMetrics localDisplayMetrics = new DisplayMetrics();
        ((Activity) paramContext).getWindowManager().getDefaultDisplay()
                .getMetrics(localDisplayMetrics);
        this.mScreenHeight = localDisplayMetrics.heightPixels;
        this.mHeaderContainer = new FrameLayout(paramContext);
        this.mHeaderImage = new ImageView(paramContext);
        int i = localDisplayMetrics.widthPixels;
        // Default header: full width at a 16:9 aspect ratio.
        setHeaderViewSize(i, (int) (9.0F * (i / 16.0F)));
        this.mShadow = new ImageView(paramContext);
        FrameLayout.LayoutParams localLayoutParams = new FrameLayout.LayoutParams(
                -1, -2);  // MATCH_PARENT x WRAP_CONTENT
        localLayoutParams.gravity = 80;  // Gravity.BOTTOM
        this.mShadow.setLayoutParams(localLayoutParams);
        this.mHeaderContainer.addView(this.mHeaderImage);
        this.mHeaderContainer.addView(this.mShadow);
        addHeaderView(this.mHeaderContainer);
        this.mScalingRunnalable = new ScalingRunnalable();
        // Listen to our own scrolls; client listeners are forwarded manually.
        super.setOnScrollListener(this);
    }

    // If the tracked pointer went up, fall back to tracking pointer 0.
    // NOTE(review): the pointer index should be extracted with
    // ACTION_POINTER_INDEX_MASK/SHIFT, not a bare >> 8; `j` is a dead local.
    private void onSecondaryPointerUp(MotionEvent paramMotionEvent) {
        int i = (paramMotionEvent.getAction()) >> 8;
        if (paramMotionEvent.getPointerId(i) == this.mActivePointerId)
            if (i != 0) {
                int j = 1;
                this.mLastMotionY = paramMotionEvent.getY(0);
                this.mActivePointerId = paramMotionEvent.getPointerId(0);
                return;
            }
    }

    // Clear all gesture-tracking state.
    private void reset() {
        this.mActivePointerId = -1;
        this.mLastMotionY = -1.0F;
        this.mMaxScale = -1.0F;
        this.mLastScale = -1.0F;
    }

    public ImageView getHeaderView() {
        return this.mHeaderImage;
    }

    public boolean onInterceptTouchEvent(MotionEvent paramMotionEvent) {
        return super.onInterceptTouchEvent(paramMotionEvent);
    }

    protected void onLayout(boolean paramBoolean, int paramInt1, int paramInt2,
                            int paramInt3, int paramInt4) {
        super.onLayout(paramBoolean, paramInt1, paramInt2, paramInt3, paramInt4);
        // Capture the resting header height once the first layout has run.
        if (this.mHeaderHeight == 0)
            this.mHeaderHeight = this.mHeaderContainer.getHeight();
    }

    @Override
    public void onScroll(AbsListView paramAbsListView, int paramInt1,
                         int paramInt2, int paramInt3) {
        Log.d("mmm", "onScroll");
        // f = how far the header has scrolled off the top.
        float f = this.mHeaderHeight - this.mHeaderContainer.getBottom();
        Log.d("mmm", "f|" + f);
        if ((f > 0.0F) && (f < this.mHeaderHeight)) {
            Log.d("mmm", "1");
            // Parallax: image scrolls at 0.65x of the list speed.
            int i = (int) (0.65D * f);
            this.mHeaderImage.scrollTo(0, -i);
        } else if (this.mHeaderImage.getScrollY() != 0) {
            Log.d("mmm", "2");
            this.mHeaderImage.scrollTo(0, 0);
        }
        if (this.mOnScrollListener != null) {
            this.mOnScrollListener.onScroll(paramAbsListView, paramInt1, paramInt2, paramInt3);
        }
    }

    public void onScrollStateChanged(AbsListView paramAbsListView, int paramInt) {
        if (this.mOnScrollListener != null)
            this.mOnScrollListener.onScrollStateChanged(paramAbsListView, paramInt);
    }

    // NOTE(review): the case labels are raw values (0 DOWN, 1 UP, 2 MOVE,
    // 3 CANCEL, 4 OUTSIDE, 5 POINTER_DOWN, 6 POINTER_UP); case 3 reads like a
    // pointer-down handler and case 5 calls onSecondaryPointerUp — verify the
    // intended mapping against MotionEvent before changing anything here.
    public boolean onTouchEvent(MotionEvent paramMotionEvent) {
        Log.d("mmm", "" + (0xFF & paramMotionEvent.getAction()));
        switch (0xFF & paramMotionEvent.getAction()) {
        case 4:
        case 0:
            if (!this.mScalingRunnalable.mIsFinished) {
                this.mScalingRunnalable.abortAnimation();
            }
            this.mLastMotionY = paramMotionEvent.getY();
            this.mActivePointerId = paramMotionEvent.getPointerId(0);
            // NOTE(review): both divisions below are int/int, so the fraction
            // is truncated before the assignment to float.
            this.mMaxScale = (this.mScreenHeight / this.mHeaderHeight);
            this.mLastScale = (this.mHeaderContainer.getBottom() / this.mHeaderHeight);
            break;
        case 2:
            Log.d("mmm", "mActivePointerId" + mActivePointerId);
            int j = paramMotionEvent.findPointerIndex(this.mActivePointerId);
            if (j == -1) {
                Log.e("PullToZoomListView", "Invalid pointerId=" + this.mActivePointerId + " in onTouchEvent");
            } else {
                if (this.mLastMotionY == -1.0F)
                    this.mLastMotionY = paramMotionEvent.getY(j);
                if (this.mHeaderContainer.getBottom() >= this.mHeaderHeight) {
                    // Header fully visible: apply damped (x0.5) zoom tracking.
                    ViewGroup.LayoutParams localLayoutParams = this.mHeaderContainer
                            .getLayoutParams();
                    float f = ((paramMotionEvent.getY(j) - this.mLastMotionY + this.mHeaderContainer
                            .getBottom()) / this.mHeaderHeight - this.mLastScale) / 2.0F + this.mLastScale;
                    if ((this.mLastScale <= 1.0D) && (f < this.mLastScale)) {
                        // Shrinking below resting size: clamp to the base height
                        // and let the ListView scroll normally.
                        localLayoutParams.height = this.mHeaderHeight;
                        this.mHeaderContainer
                                .setLayoutParams(localLayoutParams);
                        return super.onTouchEvent(paramMotionEvent);
                    }
                    this.mLastScale = Math.min(Math.max(f, 1.0F), this.mMaxScale);
                    localLayoutParams.height = ((int) (this.mHeaderHeight * this.mLastScale));
                    if (localLayoutParams.height < this.mScreenHeight)
                        this.mHeaderContainer
                                .setLayoutParams(localLayoutParams);
                    this.mLastMotionY = paramMotionEvent.getY(j);
                    // Consume the event while zooming.
                    return true;
                }
                this.mLastMotionY = paramMotionEvent.getY(j);
            }
            break;
        case 1:
            reset();
            endScraling();
            break;
        case 3:
            int i = paramMotionEvent.getActionIndex();
            this.mLastMotionY = paramMotionEvent.getY(i);
            this.mActivePointerId = paramMotionEvent.getPointerId(i);
            break;
        case 5:
            onSecondaryPointerUp(paramMotionEvent);
            this.mLastMotionY = paramMotionEvent.getY(paramMotionEvent
                    .findPointerIndex(this.mActivePointerId));
            break;
        case 6:
        }
        return super.onTouchEvent(paramMotionEvent);
    }

    // Resize the header container and remember the new resting height.
    public void setHeaderViewSize(int paramInt1, int paramInt2) {
        Object localObject = this.mHeaderContainer.getLayoutParams();
        if (localObject == null)
            localObject = new LayoutParams(paramInt1, paramInt2);
        ((ViewGroup.LayoutParams) localObject).width = paramInt1;
        ((ViewGroup.LayoutParams) localObject).height = paramInt2;
        this.mHeaderContainer
                .setLayoutParams((ViewGroup.LayoutParams) localObject);
        this.mHeaderHeight = paramInt2;
    }

    public void setOnScrollListener(
            OnScrollListener paramOnScrollListener) {
        // Keep the client's listener; we stay registered as the real listener
        // (see init) and forward callbacks from onScroll/onScrollStateChanged.
        this.mOnScrollListener = paramOnScrollListener;
    }

    public void setShadow(int paramInt) {
        this.mShadow.setBackgroundResource(paramInt);
    }

    // Animates the header height back from mScale toward 1.0 by re-posting
    // itself each frame.
    // NOTE(review): uses SystemClock.currentThreadTimeMillis() (thread CPU
    // time) to drive a wall-clock animation — presumably uptimeMillis() was
    // intended; verify before relying on the 200 ms duration.
    class ScalingRunnalable implements Runnable {
        long mDuration;
        boolean mIsFinished = true;
        float mScale;       // starting scale when the animation began
        long mStartTime;

        ScalingRunnalable() {
        }

        public void abortAnimation() {
            this.mIsFinished = true;
        }

        public boolean isFinished() {
            return this.mIsFinished;
        }

        public void run() {
            float f2;
            ViewGroup.LayoutParams localLayoutParams;
            if ((!this.mIsFinished) && (this.mScale > 1.0D)) {
                float f1 = ((float) SystemClock.currentThreadTimeMillis() - (float) this.mStartTime)
                        / (float) this.mDuration;
                // Interpolate the scale from mScale down toward 1.0.
                f2 = this.mScale - (this.mScale - 1.0F)
                        * PullToZoomListView.sInterpolator.getInterpolation(f1);
                localLayoutParams = PullToZoomListView.this.mHeaderContainer
                        .getLayoutParams();
                if (f2 > 1.0F) {
                    Log.d("mmm", "f2>1.0");
                    // NOTE(review): this first height assignment is a dead
                    // store — it is overwritten on the next line.
                    localLayoutParams.height = PullToZoomListView.this.mHeaderHeight;
                    ;
                    localLayoutParams.height = ((int) (f2 * PullToZoomListView.this.mHeaderHeight));
                    PullToZoomListView.this.mHeaderContainer
                            .setLayoutParams(localLayoutParams);
                    // Schedule the next animation frame.
                    PullToZoomListView.this.post(this);
                    return;
                }
                this.mIsFinished = true;
            }
        }

        public void startAnimation(long paramLong) {
            this.mStartTime = SystemClock.currentThreadTimeMillis();
            this.mDuration = paramLong;
            this.mScale = ((float) (PullToZoomListView.this.mHeaderContainer
                    .getBottom()) / PullToZoomListView.this.mHeaderHeight);
            this.mIsFinished = false;
            PullToZoomListView.this.post(this);
        }
    }
}
package net.billylieurance.azuresearch;

/*
 Copyright 2012 William Lieurance

 Licensed under the Apache License, Version 2.0 (the "License"); you may not
 use this file except in compliance with the License. You may obtain a copy
 of the License at

 http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 License for the specific language governing permissions and limitations
 under the License.
 */

import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * Query for the Azure/Bing "Image" search vertical. Parses each Atom entry's
 * {@code <content>/<m:properties>} block into an {@link AzureSearchImageResult},
 * including the nested {@code <d:Thumbnail>} element.
 *
 * @author wlieurance
 */
public class AzureSearchImageQuery extends
		AbstractAzureSearchQuery<AzureSearchImageResult> {

	/** Raw value for the {@code &ImageFilters=} URL parameter; empty means no filter. */
	private String _imageFilters = "";

	/**
	 * @return the service path for image queries (base path + image query-type suffix)
	 */
	@Override
	public String getQueryPath() {
		return this.getPath() + querytypeToUrl(AZURESEARCH_QUERYTYPE.IMAGE);
	}

	/**
	 * Parses one result entry into an {@link AzureSearchImageResult}.
	 *
	 * The entry's {@code <content>} child wraps a single {@code <m:properties>}
	 * element whose children carry the fields, e.g.:
	 *
	 * <pre>
	 *   &lt;d:ID m:type="Edm.Guid"&gt;0b46...&lt;/d:ID&gt;
	 *   &lt;d:Title m:type="Edm.String"&gt;OKLAHOMA SOONERS&lt;/d:Title&gt;
	 *   &lt;d:MediaUrl&gt;http://...oklahoma_100.gif&lt;/d:MediaUrl&gt;
	 *   &lt;d:Width m:type="Edm.Int32"&gt;100&lt;/d:Width&gt;
	 *   &lt;d:Thumbnail m:type="Bing.Thumbnail"&gt; ... &lt;/d:Thumbnail&gt;
	 * </pre>
	 *
	 * Parsing is best-effort: a malformed field is logged and skipped, and a
	 * partially-filled result is always returned.
	 *
	 * @param entry the Atom {@code <entry>} node
	 * @return the (possibly partially populated) image result; never null
	 */
	@Override
	public AzureSearchImageResult parseEntry(Node entry) {
		AzureSearchImageResult result = new AzureSearchImageResult();
		try {
			NodeList children = entry.getChildNodes();
			for (int i = 0; i < children.getLength(); i++) {
				Node child = children.item(i);
				if (child.getNodeName().equals("content")) {
					parseContent(child, result);
				}
			}
		} catch (NullPointerException ex) {
			// Best-effort: a structurally broken entry still yields a result.
			ex.printStackTrace();
		}
		return result;
	}

	/** Walks the single {@code <m:properties>} child of {@code <content>} field by field. */
	private void parseContent(Node content, AzureSearchImageResult result) {
		NodeList properties = content.getFirstChild().getChildNodes();
		for (int j = 0; j < properties.getLength(); j++) {
			Node property = properties.item(j);
			try {
				applyProperty(property, result);
			} catch (Exception ex) {
				// Skip the one malformed/unparseable field; keep the rest.
				ex.printStackTrace();
			}
		}
	}

	/** Copies a single {@code d:*} property node into the result, by node name. */
	private void applyProperty(Node property, AzureSearchImageResult result) {
		String name = property.getNodeName();
		if (name.equals("d:ID")) {
			result.setId(property.getTextContent());
		} else if (name.equals("d:Title")) {
			result.setTitle(property.getTextContent());
		} else if (name.equals("d:MediaUrl")) {
			result.setMediaUrl(property.getTextContent());
		} else if (name.equals("d:SourceUrl")) {
			result.setSourceUrl(property.getTextContent());
		} else if (name.equals("d:DisplayUrl")) {
			result.setDisplayUrl(property.getTextContent());
		} else if (name.equals("d:Width")) {
			result.setWidth(Integer.parseInt(property.getTextContent()));
		} else if (name.equals("d:Height")) {
			result.setHeight(Integer.parseInt(property.getTextContent()));
		} else if (name.equals("d:FileSize")) {
			result.setFileSize(Long.parseLong(property.getTextContent()));
		} else if (name.equals("d:ContentType")) {
			result.setContentType(property.getTextContent());
		} else if (name.equals("d:Thumbnail")) {
			parseThumbnail(property, result);
		}
	}

	/** Fills the result's thumbnail from the children of a {@code <d:Thumbnail>} node. */
	private void parseThumbnail(Node thumbnail, AzureSearchImageResult result) {
		NodeList kids = thumbnail.getChildNodes();
		for (int k = 0; k < kids.getLength(); k++) {
			Node kid = kids.item(k);
			try {
				String name = kid.getNodeName();
				if (name.equals("d:MediaUrl")) {
					result.getThumbnail().setMediaUrl(kid.getTextContent());
				} else if (name.equals("d:Width")) {
					result.getThumbnail().setWidth(
							Integer.parseInt(kid.getTextContent()));
				} else if (name.equals("d:Height")) {
					result.getThumbnail().setHeight(
							Integer.parseInt(kid.getTextContent()));
				} else if (name.equals("d:FileSize")) {
					result.getThumbnail().setFileSize(
							Long.parseLong(kid.getTextContent()));
				} else if (name.equals("d:ContentType")) {
					result.getThumbnail().setContentType(kid.getTextContent());
				}
			} catch (Exception ex) {
				// Best-effort per thumbnail field, mirroring the property loop.
				ex.printStackTrace();
			}
		}
	}

	/**
	 * @return the extra URL-query fragment for this vertical:
	 *         {@code &ImageFilters='...'} when a filter is set, otherwise ""
	 */
	@Override
	public String getAdditionalUrlQuery() {
		StringBuilder sb = new StringBuilder();
		if (!this.getImageFilters().equals("")) {
			sb.append("&ImageFilters='");
			sb.append(this.getImageFilters());
			sb.append("'");
		}
		return sb.toString();
	}

	/**
	 * @return the current raw ImageFilters value (never null; "" when unset)
	 */
	public String getImageFilters() {
		return _imageFilters;
	}

	/**
	 * @param imageFilters raw ImageFilters value; it is inserted into the URL
	 *        between single quotes without further escaping
	 */
	public void setImageFilters(String imageFilters) {
		_imageFilters = imageFilters;
	}
}
package com.intel.bugkoops;

import android.content.Context;
import android.content.SharedPreferences;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.Uri;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v7.app.AlertDialog;
import android.text.method.LinkMovementMethod;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;

import org.json.JSONArray;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.concurrent.TimeUnit;

import javax.net.ssl.HttpsURLConnection;

/**
 * Static helpers: preference accessors, byte/string conversions, text
 * summarization, date pretty-printing, and small Bundle utilities.
 */
public class Utility {
    private static final String LOG_TAG = Utility.class.getSimpleName();

    /** @return the stored camera-id preference, or its resource default. */
    public static String getCameraId(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_cameraid_key),
                context.getString(R.string.pref_cameraid_default));
    }

    /** @return the stored flash-state preference, or its resource default. */
    public static String getFlashState(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_flashstate_key),
                context.getString(R.string.pref_flashstate_default));
    }

    /** @return the stored focusing-mode preference, or its resource default. */
    public static String getFocusingMode(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getString(context.getString(R.string.pref_focusingmode_key),
                context.getString(R.string.pref_focusingmode_default));
    }

    /**
     * @return the invert-colors preference, falling back to the resource default.
     *
     * FIX: previously used Boolean.getBoolean(...), which looks up a JVM
     * *system property* named by the string instead of parsing "true"/"false",
     * so the resource default always evaluated to false. parseBoolean parses
     * the resource string itself.
     */
    public static boolean isInvertColorsEnabled(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getBoolean(context.getString(R.string.pref_invertcolors_key),
                Boolean.parseBoolean(context.getString(R.string.pref_invertcolors_default)));
    }

    /** @return the metering preference (same Boolean.parseBoolean fix as above). */
    public static boolean isMeteringEnabled(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getBoolean(context.getString(R.string.pref_metering_key),
                Boolean.parseBoolean(context.getString(R.string.pref_metering_default)));
    }

    /** @return the exposure preference (same Boolean.parseBoolean fix as above). */
    public static boolean isExposureEnabled(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getBoolean(context.getString(R.string.pref_exposure_key),
                Boolean.parseBoolean(context.getString(R.string.pref_exposure_default)));
    }

    /** @return the fullscreen preference (same Boolean.parseBoolean fix as above). */
    public static boolean isFullscreenEnabled(Context context) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        return prefs.getBoolean(context.getString(R.string.pref_fullscreen_key),
                Boolean.parseBoolean(context.getString(R.string.pref_fullscreen_default)));
    }

    /** Shows the "about" dialog with clickable credit links. */
    public static void showAbout(Context context) {
        LayoutInflater inflater =
                (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        View messageView = inflater.inflate(R.layout.about, null, false);

        // Make links in the credits TextView tappable.
        TextView creditsTextView = (TextView) messageView.findViewById(R.id.about_credits);
        creditsTextView.setMovementMethod(LinkMovementMethod.getInstance());

        AlertDialog.Builder builder = new AlertDialog.Builder(context);
        builder.setIcon(R.mipmap.ic_launcher);
        builder.setTitle(R.string.app_name);
        builder.setView(messageView);
        builder.create();
        builder.show();
    }

    /** @return the unsigned 16-bit big-endian value formed by b0 (high) and b1 (low). */
    public static int bytesToInt(byte b0, byte b1) {
        return ((0xFF & b0) << 8) | (0xFF & b1);
    }

    /** @return the 32-bit big-endian value formed by b0..b3 (b0 most significant). */
    public static int bytesToInt(byte b0, byte b1, byte b2, byte b3) {
        return ((0xFF & b0) << 24) | ((0xFF & b1) << 16) | ((0xFF & b2) << 8) | (0xFF & b3);
    }

    /**
     * Decodes bytes as ISO-8859-1 (a 1:1 byte-to-char mapping).
     *
     * @return the decoded string, or "" if the charset is unsupported
     *         (cannot happen for ISO-8859-1, but the API forces the catch)
     */
    public static String bytesToString(byte[] data) {
        String text = "";
        try {
            text = new String(data, "ISO-8859-1");
        } catch (UnsupportedEncodingException e) {
            Log.e(LOG_TAG, "Unsupported encoding !");
        }
        return text;
    }

    /**
     * Truncates text to roughly numberOfLines lines of charsPerLine characters.
     * Lines are accumulated (each followed by '\n') until the character budget
     * would be exceeded; the final '\n' is trimmed off.
     *
     * @return the summary, or "" if no line fit the budget
     */
    public static String summary(String text, int numberOfLines, int charsPerLine) {
        final int budget = charsPerLine * numberOfLines;
        StringBuilder sb = new StringBuilder();
        String[] lines = text.split("\\r?\\n", numberOfLines);
        for (String line : lines) {
            if (sb.length() + line.length() >= budget) {
                break;
            }
            sb.append(line).append('\n');
        }
        if (sb.length() == 0) {
            return "";
        }
        // length() - 1 drops the trailing '\n' appended after the last line.
        return sb.substring(0, Math.min(budget, sb.length() - 1));
    }

    /** @return a 3-line / 21-chars-per-line summary of text. */
    public static String summarySmall(String text) {
        final int NUMBER_OF_LINES = 3;
        final int CHARS_PER_LINE = 21;
        return summary(text, NUMBER_OF_LINES, CHARS_PER_LINE);
    }

    /** @return a 7-line / 25-chars-per-line summary of text. */
    public static String summaryMedium(String text) {
        final int NUMBER_OF_LINES = 7;
        final int CHARS_PER_LINE = 25;
        return summary(text, NUMBER_OF_LINES, CHARS_PER_LINE);
    }

    /**
     * Formats a date relative to now: "just now", "N seconds/minutes/hours ago",
     * "yesterday at HH:mm:ss", or an absolute "EEE, d MMM HH:mm:ss" for older dates.
     * Note: formatting uses the device's default locale.
     */
    public static String getPrettyDate(Context context, Date date) {
        Date endDate = new Date();
        long duration = endDate.getTime() - date.getTime();
        long diffInSeconds = TimeUnit.MILLISECONDS.toSeconds(duration);
        long diffInMinutes = TimeUnit.MILLISECONDS.toMinutes(duration);
        long diffInHours = TimeUnit.MILLISECONDS.toHours(duration);
        // Was "(int) (duration / DAY_IN_MILLIS)" — toDays is equivalent and avoids
        // the pointless (and overflow-prone) narrowing cast.
        long diffInDays = TimeUnit.MILLISECONDS.toDays(duration);

        if (diffInDays == 0) {
            if (diffInHours > 0) {
                if (diffInHours == 1) {
                    return Long.toString(diffInHours) + " "
                            + context.getString(R.string.utility_pretty_date_hour_ago);
                } else {
                    return Long.toString(diffInHours) + " "
                            + context.getString(R.string.utility_pretty_date_hours_ago);
                }
            } else if (diffInMinutes > 0) {
                if (diffInMinutes == 1) {
                    return Long.toString(diffInMinutes) + " "
                            + context.getString(R.string.utility_pretty_date_minute_ago);
                } else {
                    return Long.toString(diffInMinutes) + " "
                            + context.getString(R.string.utility_pretty_date_minutes_ago);
                }
            } else if (diffInSeconds > 0) {
                if (diffInSeconds == 1) {
                    return Long.toString(diffInSeconds) + " "
                            + context.getString(R.string.utility_pretty_date_second_ago);
                } else {
                    return Long.toString(diffInSeconds) + " "
                            + context.getString(R.string.utility_pretty_date_seconds_ago);
                }
            } else {
                return context.getString(R.string.utility_pretty_date_just_now);
            }
        } else if (diffInDays == 1) {
            final DateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
            return context.getString(R.string.utility_pretty_date_yesterday_at) + " "
                    + dateFormat.format(date);
        } else {
            final DateFormat dateFormat = new SimpleDateFormat("EEE, d MMM HH:mm:ss");
            return dateFormat.format(date);
        }
    }

    /** @return the date formatted as "EEE, d MMM HH:mm:ss" in the default locale. */
    public static String getDate(Date date) {
        final DateFormat dateFormat = new SimpleDateFormat("EEE, d MMM HH:mm:ss");
        return dateFormat.format(date);
    }

    /** @return true when there is an active, connected network. */
    public static boolean isNetworkAvailable(Context context) {
        ConnectivityManager connectivityManager =
                (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo activeNetworkInfo = connectivityManager.getActiveNetworkInfo();
        return activeNetworkInfo != null && activeNetworkInfo.isConnected();
    }

    /** @return string, or defaultValue when string is null. */
    public static String defaultValue(String string, String defaultValue) {
        if (string == null) {
            return defaultValue;
        }
        return string;
    }

    /** Null-safe Bundle.getString; returns null when the bundle itself is null. */
    public static String getString(Bundle bundle, String key) {
        if (bundle == null) {
            return null;
        }
        return bundle.getString(key);
    }

    /** Null-safe Bundle.getString with a fallback for null bundle or missing key. */
    public static String getString(Bundle bundle, String key, String defaultValue) {
        return defaultValue(getString(bundle, key), defaultValue);
    }

    /** Null-safe Bundle.getBundle; returns null when the bundle itself is null. */
    public static Bundle getBundle(Bundle bundle, String key) {
        if (bundle == null) {
            return null;
        }
        return bundle.getBundle(key);
    }

    /** Null-safe Bundle.getInt; returns defaultValue when the bundle is null. */
    public static int getInt(Bundle bundle, String key, int defaultValue) {
        if (bundle == null) {
            return defaultValue;
        }
        return bundle.getInt(key, defaultValue);
    }

    /** @return the XOR checksum of data[start .. start+length-1]. */
    public static byte xor(byte[] data, int start, int length) {
        int acc = 0;
        for (int index = start; index < start + length; ++index) {
            acc ^= data[index];
        }
        return (byte) (acc & 0xFF);
    }
}