text
stringlengths
7
1.01M
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInspection.dataFlow; import com.intellij.codeInsight.ExceptionUtil; import com.intellij.codeInsight.Nullability; import com.intellij.codeInsight.daemon.ImplicitUsageProvider; import com.intellij.codeInsight.daemon.impl.UnusedSymbolUtil; import com.intellij.codeInspection.dataFlow.ControlFlow.ControlFlowOffset; import com.intellij.codeInspection.dataFlow.Trap.InsideFinally; import com.intellij.codeInspection.dataFlow.Trap.TryCatch; import com.intellij.codeInspection.dataFlow.Trap.TryFinally; import com.intellij.codeInspection.dataFlow.Trap.TwrFinally; import com.intellij.codeInspection.dataFlow.inliner.*; import com.intellij.codeInspection.dataFlow.instructions.*; import com.intellij.codeInspection.dataFlow.instructions.MethodCallInstruction.MethodType; import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet; import com.intellij.codeInspection.dataFlow.value.*; import com.intellij.codeInspection.dataFlow.value.DfaRelationValue.RelationType; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.registry.Registry; import com.intellij.psi.*; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.tree.IElementType; import com.intellij.psi.util.InheritanceUtil; import com.intellij.psi.util.PsiTreeUtil; import 
com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.FList;
import com.siyeh.ig.callMatcher.CallMatcher;
import com.siyeh.ig.numeric.UnnecessaryExplicitNumericCastInspection;
import com.siyeh.ig.psiutils.*;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;
import java.util.stream.Stream;

import static com.intellij.psi.CommonClassNames.*;

/**
 * Builds a {@link ControlFlow} (a linear sequence of stack-machine {@link Instruction}s) from a PSI
 * subtree — a code block, an expression, or a whole class (its initializers) — for consumption by
 * the data-flow analysis engine. Visitor methods emit instructions in evaluation order.
 */
public class ControlFlowAnalyzer extends JavaElementVisitor {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInspection.dataFlow.ControlFlowAnalyzer");
  // Calls producing a list with statically known elements; used by getIteratedElement() to model
  // iteration over literal lists.
  private static final CallMatcher LIST_INITIALIZER = CallMatcher.anyOf(
    CallMatcher.staticCall(JAVA_UTIL_ARRAYS, "asList"),
    CallMatcher.staticCall(JAVA_UTIL_LIST, "of"));
  // Maximal constant trip count for which a counting loop is modeled by unrolling
  // (see addCountingLoopBound).
  static final int MAX_UNROLL_SIZE = 3;
  private final PsiElement myCodeFragment;
  private final boolean myIgnoreAssertions;
  private final boolean myInlining;
  private final Project myProject;

  // Thrown internally to abort flow construction (e.g. on a PsiErrorElement);
  // buildControlFlow() converts it into a null result.
  private static class CannotAnalyzeException extends RuntimeException {
  }

  private final DfaValueFactory myFactory;
  private ControlFlow myCurrentFlow;
  // Stack of currently open try/catch/finally contexts.
  private FList<Trap> myTrapStack = FList.emptyList();
  private final ExceptionTransfer myRuntimeException;
  private final ExceptionTransfer myError;
  private final PsiType myAssertionError;
  private InlinedBlockContext myInlinedBlockContext;

  ControlFlowAnalyzer(final DfaValueFactory valueFactory,
                      @NotNull PsiElement codeFragment,
                      boolean ignoreAssertions,
                      boolean inlining) {
    myInlining = inlining;
    myFactory = valueFactory;
    myCodeFragment = codeFragment;
    myProject = codeFragment.getProject();
    myIgnoreAssertions = ignoreAssertions;
    GlobalSearchScope scope = codeFragment.getResolveScope();
    myRuntimeException = new ExceptionTransfer(myFactory.createDfaType(createClassType(scope, JAVA_LANG_RUNTIME_EXCEPTION)));
    myError = new ExceptionTransfer(myFactory.createDfaType(createClassType(scope, JAVA_LANG_ERROR)));
    myAssertionError = createClassType(scope, JAVA_LANG_ASSERTION_ERROR);
  }

  // Emits either the static or the instance initialization sequence of the class:
  // first default-initializes fields that have no explicit initializer, then runs
  // field initializers and initializer blocks in source order.
  private void buildClassInitializerFlow(PsiClass psiClass, boolean isStatic) {
    for (PsiElement element : psiClass.getChildren()) {
      if (element instanceof PsiField &&
          !((PsiField)element).hasInitializer() &&
          ((PsiField)element).hasModifierProperty(PsiModifier.STATIC) == isStatic) {
        visitField((PsiField)element);
      }
    }
    if (!isStatic &&
        Stream.of(Extensions.getExtensions(ImplicitUsageProvider.EP_NAME)).anyMatch(p -> p.isClassWithCustomizedInitialization(psiClass))) {
      // A framework may initialize the object in a non-standard way:
      // conservatively assume 'this' escapes and all field values become unknown.
      addInstruction(new EscapeInstruction(Collections.singleton(getFactory().getVarFactory().createThisValue(psiClass))));
      addInstruction(new FlushFieldsInstruction());
    }
    for (PsiElement element : psiClass.getChildren()) {
      if (((element instanceof PsiField && ((PsiField)element).hasInitializer()) || element instanceof PsiClassInitializer) &&
          ((PsiMember)element).hasModifierProperty(PsiModifier.STATIC) == isStatic) {
        element.accept(this);
      }
    }
    addInstruction(new EndOfInitializerInstruction(isStatic));
    addInstruction(new FlushFieldsInstruction());
  }

  /**
   * Builds the control flow for {@code myCodeFragment}.
   *
   * @return the built flow, or null if the fragment cannot be analyzed (e.g. contains syntax errors)
   */
  @Nullable
  public ControlFlow buildControlFlow() {
    myCurrentFlow = new ControlFlow(myFactory);
    try {
      if (myCodeFragment instanceof PsiClass) {
        // if(unknown) { staticInitializer(); } else { instanceInitializer(); }
        pushUnknown();
        ConditionalGotoInstruction conditionalGoto = new ConditionalGotoInstruction(null, false, null);
        addInstruction(conditionalGoto);
        buildClassInitializerFlow((PsiClass)myCodeFragment, true);
        GotoInstruction unconditionalGoto = new GotoInstruction(null);
        addInstruction(unconditionalGoto);
        conditionalGoto.setOffset(getInstructionCount());
        buildClassInitializerFlow((PsiClass)myCodeFragment, false);
        unconditionalGoto.setOffset(getInstructionCount());
      }
      else {
        myCodeFragment.accept(this);
      }
    }
    catch (CannotAnalyzeException e) {
      return null;
    }
    PsiElement parent = myCodeFragment.getParent();
    if (parent instanceof PsiLambdaExpression && myCodeFragment instanceof PsiExpression) {
      // Expression-bodied lambda: convert the value to the functional interface return type
      // and check the returned value's nullability.
      generateBoxingUnboxingInstructionFor((PsiExpression)myCodeFragment,
                                           LambdaUtil.getFunctionalInterfaceReturnType((PsiLambdaExpression)parent));
      addInstruction(new CheckNotNullInstruction(NullabilityProblemKind.nullableReturn.problem((PsiExpression)myCodeFragment)));
      addInstruction(new PopInstruction());
    }
    addInstruction(new ReturnInstruction(myFactory.controlTransfer(ReturnTransfer.INSTANCE, FList.emptyList()), null));
    if (Registry.is("idea.dfa.live.variables.analysis")) {
      new LiveVariablesAnalyzer(myCurrentFlow, myFactory).flushDeadVariablesOnStatementFinish();
    }
    return myCurrentFlow;
  }

  DfaValueFactory getFactory() {
    return myFactory;
  }

  PsiElement getContext() {
    return myCodeFragment;
  }

  // Resolves fqn in scope; falls back to an unresolved type if the class is not found.
  private PsiClassType createClassType(GlobalSearchScope scope, String fqn) {
    PsiClass aClass = JavaPsiFacade.getInstance(myProject).findClass(fqn, scope);
    if (aClass != null) return JavaPsiFacade.getElementFactory(myProject).createType(aClass);
    return JavaPsiFacade.getElementFactory(myProject).createTypeByFQClassName(fqn, scope);
  }

  <T extends Instruction> T addInstruction(T i) {
    myCurrentFlow.addInstruction(i);
    return i;
  }

  int getInstructionCount() {
    return myCurrentFlow.getInstructionCount();
  }

  private ControlFlowOffset getEndOffset(PsiElement element) {
    return myCurrentFlow.getEndOffset(element);
  }

  private ControlFlowOffset getStartOffset(PsiElement element) {
    return myCurrentFlow.getStartOffset(element);
  }

  private void startElement(PsiElement element) {
    myCurrentFlow.startElement(element);
  }

  private void finishElement(PsiElement element) {
    myCurrentFlow.finishElement(element);
    if (element instanceof PsiStatement && !(element instanceof PsiReturnStatement)) {
      addInstruction(new FinishElementInstruction(element));
    }
  }

  @Override
  public void visitErrorElement(PsiErrorElement element) {
    // Broken/incomplete code: give up on the whole fragment.
    throw new CannotAnalyzeException();
  }

  @Override
  public void
visitAssignmentExpression(PsiAssignmentExpression expression) {
    // Emits the lvalue, the (converted) rvalue, then an AssignInstruction,
    // dispatching on the compound-assignment operator where a better model exists.
    PsiExpression lExpr = expression.getLExpression();
    PsiExpression rExpr = expression.getRExpression();
    startElement(expression);
    if (rExpr == null) {
      // Incomplete assignment: produce an unknown value and bail out.
      pushUnknown();
      finishElement(expression);
      return;
    }
    IElementType op = expression.getOperationTokenType();
    PsiType type = expression.getType();
    boolean isBoolean = PsiType.BOOLEAN.equals(type);
    if (op == JavaTokenType.EQ) {
      lExpr.accept(this);
      rExpr.accept(this);
      generateBoxingUnboxingInstructionFor(rExpr, type);
    }
    else if (op == JavaTokenType.ANDEQ && isBoolean) {
      generateBooleanAssignmentExpression(true, lExpr, rExpr, type);
    }
    else if (op == JavaTokenType.OREQ && isBoolean) {
      generateBooleanAssignmentExpression(false, lExpr, rExpr, type);
    }
    else if (op == JavaTokenType.XOREQ && isBoolean) {
      generateXorExpression(expression, new PsiExpression[]{lExpr, rExpr}, type, true);
    }
    else if (op == JavaTokenType.PLUSEQ && type != null && type.equalsToText(JAVA_LANG_STRING)) {
      // String +=: concatenation.
      lExpr.accept(this);
      addInstruction(new DupInstruction());
      rExpr.accept(this);
      addInstruction(new BinopInstruction(JavaTokenType.PLUS, null, type));
    }
    else if (isAssignmentDivision(op) && type != null && PsiType.LONG.isAssignableFrom(type)) {
      // Integral /= or %=: model a possible ArithmeticException on zero divisor; result unknown.
      lExpr.accept(this);
      generateBoxingUnboxingInstructionFor(lExpr, type);
      rExpr.accept(this);
      generateBoxingUnboxingInstructionFor(rExpr, type);
      checkZeroDivisor();
      addInstruction(new PopInstruction());
      pushUnknown();
    }
    else {
      generateDefaultAssignmentBinOp(lExpr, rExpr, type);
    }
    addInstruction(new AssignInstruction(rExpr, myFactory.createValue(lExpr)));
    finishElement(expression);
  }

  // Fallback for compound assignments: evaluate lhs (duplicated: once as assignment target,
  // once as binop operand), then rhs, then an unspecified binop.
  private void generateDefaultAssignmentBinOp(PsiExpression lExpr, PsiExpression rExpr, final PsiType exprType) {
    lExpr.accept(this);
    addInstruction(new DupInstruction());
    generateBoxingUnboxingInstructionFor(lExpr,exprType);
    rExpr.accept(this);
    generateBoxingUnboxingInstructionFor(rExpr, exprType);
    addInstruction(new BinopInstruction(null, null, exprType));
  }

  @Override
  public void visitAssertStatement(PsiAssertStatement statement) {
    if (myIgnoreAssertions) {
      return;
    }
    startElement(statement);
    final PsiExpression condition = statement.getAssertCondition();
    final PsiExpression description = statement.getAssertDescription();
    if (condition != null) {
      condition.accept(this);
      generateBoxingUnboxingInstructionFor(condition, PsiType.BOOLEAN);
      // If the condition holds, jump over the AssertionError throw.
      addInstruction(new ConditionalGotoInstruction(getEndOffset(statement), false, condition));
      if (description != null) {
        description.accept(this);
      }
      throwException(myAssertionError, statement);
    }
    finishElement(statement);
  }

  @Override
  public void visitDeclarationStatement(PsiDeclarationStatement statement) {
    startElement(statement);
    PsiElement[] elements = statement.getDeclaredElements();
    for (PsiElement element : elements) {
      if (element instanceof PsiClass) {
        // Local class declaration: locals it captures escape the current scope.
        addInstruction(new EmptyInstruction(element));
        handleEscapedVariables(element);
      }
      else if (element instanceof PsiVariable) {
        PsiVariable variable = (PsiVariable)element;
        PsiExpression initializer = variable.getInitializer();
        if (initializer != null) {
          initializeVariable(variable, initializer);
        }
      }
    }
    finishElement(statement);
  }

  @Override
  public void visitField(PsiField field) {
    PsiExpression initializer = field.getInitializer();
    if (initializer != null) {
      initializeVariable(field, initializer);
    }
    else if (!field.hasModifierProperty(PsiModifier.FINAL) && !UnusedSymbolUtil.isImplicitWrite(field)) {
      // initialize with default value
      DfaVariableValue dfaVariable = myFactory.getVarFactory().createVariableValue(field);
      DfaConstValue value = myFactory.getConstFactory().createDefault(field.getType());
      new CFGBuilder(this).assignAndPop(dfaVariable, value);
    }
  }

  @Override
  public void visitClassInitializer(PsiClassInitializer initializer) {
    visitCodeBlock(initializer.getBody());
  }

  // Emits "variable = initializer" with a boxing/unboxing conversion to the variable's type.
  private void initializeVariable(PsiVariable variable, PsiExpression initializer) {
    if (DfaUtil.ignoreInitializer(variable)) return;
    DfaVariableValue dfaVariable = myFactory.getVarFactory().createVariableValue(variable);
    addInstruction(new PushInstruction(dfaVariable, initializer, true));
    initializer.accept(this);
    generateBoxingUnboxingInstructionFor(initializer, variable.getType());
    addInstruction(new AssignInstruction(initializer, dfaVariable));
    addInstruction(new PopInstruction());
  }

  @Override
  public void visitCodeFragment(JavaCodeFragment codeFragment) {
    startElement(codeFragment);
    if (codeFragment instanceof PsiExpressionCodeFragment) {
      PsiExpression expression = ((PsiExpressionCodeFragment)codeFragment).getExpression();
      if (expression != null) {
        expression.accept(this);
      }
    }
    finishElement(codeFragment);
  }

  @Override
  public void visitCodeBlock(PsiCodeBlock block) {
    startElement(block);
    for (PsiStatement statement : block.getStatements()) {
      statement.accept(this);
    }
    flushCodeBlockVariables(block);
    finishElement(block);
  }

  // Forgets DFA variables that go out of scope at the end of the block: declarations inside it
  // plus variables owned by the parent construct (catch parameter, foreach parameter,
  // for-initialization variables, try-with-resources variables).
  private void flushCodeBlockVariables(PsiCodeBlock block) {
    for (PsiStatement statement : block.getStatements()) {
      if (statement instanceof PsiDeclarationStatement) {
        for (PsiElement declaration : ((PsiDeclarationStatement)statement).getDeclaredElements()) {
          if (declaration instanceof PsiVariable) {
            myCurrentFlow.removeVariable((PsiVariable)declaration);
          }
        }
      }
    }
    PsiElement parent = block.getParent();
    if (parent instanceof PsiCatchSection) {
      myCurrentFlow.removeVariable(((PsiCatchSection)parent).getParameter());
    }
    else if (parent instanceof PsiForeachStatement) {
      myCurrentFlow.removeVariable(((PsiForeachStatement)parent).getIterationParameter());
    }
    else if (parent instanceof PsiForStatement) {
      PsiStatement statement = ((PsiForStatement)parent).getInitialization();
      if (statement instanceof PsiDeclarationStatement) {
        for (PsiElement declaration : ((PsiDeclarationStatement)statement).getDeclaredElements()) {
          if (declaration instanceof PsiVariable) {
            myCurrentFlow.removeVariable((PsiVariable)declaration);
          }
        }
      }
    }
    else if (parent instanceof PsiTryStatement) {
      PsiResourceList list =
((PsiTryStatement)parent).getResourceList();
      if (list != null) {
        for (PsiResourceListElement resource : list) {
          if (resource instanceof PsiResourceVariable) {
            myCurrentFlow.removeVariable((PsiVariable)resource);
          }
        }
      }
    }
  }

  @Override
  public void visitBlockStatement(PsiBlockStatement statement) {
    startElement(statement);
    statement.getCodeBlock().accept(this);
    finishElement(statement);
  }

  @Override
  public void visitBreakStatement(PsiBreakStatement statement) {
    startElement(statement);
    PsiStatement exitedStatement = statement.findExitedStatement();
    if (exitedStatement != null && PsiTreeUtil.isAncestor(myCodeFragment, exitedStatement, false)) {
      // Transfer to the end of the exited statement, flushing its local variables
      // and unwinding any traps (e.g. finally blocks) opened inside it.
      controlTransfer(new InstructionTransfer(getEndOffset(exitedStatement), getVariablesInside(exitedStatement)),
                      getTrapsInsideElement(exitedStatement));
    }
    else {
      // Jumping out of analyzed code fragment
      controlTransfer(ReturnTransfer.INSTANCE, getTrapsInsideElement(myCodeFragment));
    }
    finishElement(statement);
  }

  // Emits a transfer to the given target, unwinding the given trap list.
  private void controlTransfer(@NotNull TransferTarget target, FList<Trap> traps) {
    addInstruction(new ControlTransferInstruction(myFactory.controlTransfer(target, traps)));
  }

  // Traps from the current trap stack whose anchors lie strictly inside the given element.
  @NotNull
  private FList<Trap> getTrapsInsideElement(PsiElement element) {
    return FList.createFromReversed(ContainerUtil.reverse(
      ContainerUtil.findAll(myTrapStack, cd -> PsiTreeUtil.isAncestor(element, cd.getAnchor(), true))));
  }

  // DFA variables for every PsiVariable declared anywhere inside the exited statement
  // (they die at the jump target).
  @NotNull
  private List<DfaVariableValue> getVariablesInside(PsiElement exitedStatement) {
    return ContainerUtil.map(PsiTreeUtil.findChildrenOfType(exitedStatement, PsiVariable.class),
                             myFactory.getVarFactory()::createVariableValue);
  }

  @Override
  public void visitContinueStatement(PsiContinueStatement statement) {
    startElement(statement);
    PsiStatement continuedStatement = statement.findContinuedStatement();
    if (continuedStatement instanceof PsiLoopStatement && PsiTreeUtil.isAncestor(myCodeFragment, continuedStatement, false)) {
      // Transfer to the end of the loop body (from where the loop iterates).
      PsiStatement body = ((PsiLoopStatement)continuedStatement).getBody();
      controlTransfer(new InstructionTransfer(getEndOffset(body), getVariablesInside(body)), getTrapsInsideElement(body));
    }
    else {
      // Jumping out of analyzed code fragment
      controlTransfer(ReturnTransfer.INSTANCE, getTrapsInsideElement(myCodeFragment));
    }
    finishElement(statement);
  }

  @Override
  public void visitDoWhileStatement(PsiDoWhileStatement statement) {
    startElement(statement);
    PsiStatement body = statement.getBody();
    if (body != null) {
      body.accept(this);
      PsiExpression condition = statement.getCondition();
      if (condition != null) {
        condition.accept(this);
        generateBoxingUnboxingInstructionFor(condition, PsiType.BOOLEAN);
        // Loop back to the start while the condition holds.
        addInstruction(new ConditionalGotoInstruction(getStartOffset(statement), false, condition));
      }
    }
    finishElement(statement);
  }

  @Override
  public void visitEmptyStatement(PsiEmptyStatement statement) {
    startElement(statement);
    finishElement(statement);
  }

  @Override
  public void visitExpressionStatement(PsiExpressionStatement statement) {
    startElement(statement);
    final PsiExpression expr = statement.getExpression();
    expr.accept(this);
    // A statement leaves nothing on the stack: discard the expression value.
    addInstruction(new PopInstruction());
    finishElement(statement);
  }

  @Override
  public void visitExpressionListStatement(PsiExpressionListStatement statement) {
    startElement(statement);
    PsiExpression[] expressions = statement.getExpressionList().getExpressions();
    for (PsiExpression expr : expressions) {
      expr.accept(this);
      addInstruction(new PopInstruction());
    }
    finishElement(statement);
  }

  // Value of an element obtained by iterating the given iterated value, when its content is
  // statically known (array initializer, constant array variable, Arrays.asList/List.of call);
  // unknown otherwise.
  private DfaValue getIteratedElement(PsiType type, PsiExpression iteratedValue) {
    PsiExpression[] expressions = null;
    if (iteratedValue instanceof PsiNewExpression) {
      PsiArrayInitializerExpression initializer = ((PsiNewExpression)iteratedValue).getArrayInitializer();
      if (initializer != null) {
        expressions = initializer.getInitializers();
      }
    }
    else if (iteratedValue instanceof PsiReferenceExpression) {
      PsiElement arrayVar = ((PsiReferenceExpression)iteratedValue).resolve();
      if (arrayVar instanceof PsiVariable) {
        expressions = ExpressionUtils.getConstantArrayElements((PsiVariable)arrayVar);
      }
    }
    if (iteratedValue instanceof PsiMethodCallExpression && LIST_INITIALIZER.test((PsiMethodCallExpression)iteratedValue)) {
      expressions = ((PsiMethodCallExpression)iteratedValue).getArgumentList().getExpressions();
    }
    return expressions == null ? DfaUnknownValue.getInstance() : getFactory().createCommonValue(expressions, type);
  }

  @Override
  public void visitForeachStatement(PsiForeachStatement statement) {
    startElement(statement);
    final PsiParameter parameter = statement.getIterationParameter();
    final PsiExpression iteratedValue = PsiUtil.skipParenthesizedExprDown(statement.getIteratedValue());
    ControlFlowOffset loopEndOffset = getEndOffset(statement);
    boolean hasSizeCheck = false;
    if (iteratedValue != null) {
      iteratedValue.accept(this);
      addInstruction(new DereferenceInstruction(iteratedValue));
      DfaValue qualifier = myFactory.createValue(iteratedValue);
      if (qualifier instanceof DfaVariableValue) {
        // When iterating a tracked array/collection variable, test emptiness via its
        // length/size special field so an empty container skips the body entirely.
        PsiType type = iteratedValue.getType();
        SpecialField length = null;
        if (type instanceof PsiArrayType) {
          length = SpecialField.ARRAY_LENGTH;
        }
        else if (InheritanceUtil.isInheritor(type, JAVA_UTIL_COLLECTION)) {
          length = SpecialField.COLLECTION_SIZE;
        }
        if (length != null) {
          addInstruction(new PushInstruction(length.createValue(myFactory, qualifier), null));
          addInstruction(new PushInstruction(myFactory.getInt(0), null));
          addInstruction(new BinopInstruction(JavaTokenType.EQEQ, iteratedValue, PsiType.BOOLEAN));
          addInstruction(new ConditionalGotoInstruction(loopEndOffset, false, null));
          hasSizeCheck = true;
        }
      }
    }
    ControlFlowOffset offset = myCurrentFlow.getNextOffset();
    DfaVariableValue dfaVariable = myFactory.getVarFactory().createVariableValue(parameter);
    new CFGBuilder(this).assignAndPop(dfaVariable, getIteratedElement(parameter.getType(), iteratedValue));
    if (!hasSizeCheck) {
      // Unknown iterable: the body may execute zero times.
      pushUnknown();
      addInstruction(new ConditionalGotoInstruction(loopEndOffset, true, null));
    }
    final PsiStatement body =
statement.getBody();
    if (body != null) {
      body.accept(this);
    }
    if (hasSizeCheck) {
      // After the body: nondeterministically iterate again or leave the loop.
      pushUnknown();
      addInstruction(new ConditionalGotoInstruction(loopEndOffset, true, null));
    }
    addInstruction(new GotoInstruction(offset));
    finishElement(statement);
    myCurrentFlow.removeVariable(parameter);
  }

  @Override
  public void visitForStatement(PsiForStatement statement) {
    startElement(statement);
    final ArrayList<PsiElement> declaredVariables = new ArrayList<>();
    PsiStatement initialization = statement.getInitialization();
    if (initialization != null) {
      initialization.accept(this);
      // Collect variables declared in the initialization so they can be flushed after the loop.
      initialization.accept(new JavaRecursiveElementWalkingVisitor() {
        @Override
        public void visitReferenceExpression(PsiReferenceExpression expression) {
          visitElement(expression);
        }

        @Override
        public void visitDeclarationStatement(PsiDeclarationStatement statement) {
          PsiElement[] declaredElements = statement.getDeclaredElements();
          for (PsiElement element : declaredElements) {
            if (element instanceof PsiVariable) {
              declaredVariables.add(element);
            }
          }
        }
      });
    }
    PsiExpression condition = statement.getCondition();
    if (condition != null) {
      condition.accept(this);
      generateBoxingUnboxingInstructionFor(condition, PsiType.BOOLEAN);
    }
    else {
      // No condition: behaves as "true" (unless the statement is broken and lacks ')').
      addInstruction(new PushInstruction(statement.getRParenth() == null ? null : myFactory.getConstFactory().getTrue(), null));
    }
    addInstruction(new ConditionalGotoInstruction(getEndOffset(statement), true, condition));
    PsiStatement body = statement.getBody();
    if (body != null) {
      body.accept(this);
    }
    if (!addCountingLoopBound(statement)) {
      PsiStatement update = statement.getUpdate();
      if (update != null) {
        update.accept(this);
      }
    }
    ControlFlowOffset offset = initialization != null ? getEndOffset(initialization) : getStartOffset(statement);
    addInstruction(new GotoInstruction(offset));
    finishElement(statement);
    for (PsiElement declaredVariable : declaredVariables) {
      PsiVariable psiVariable = (PsiVariable)declaredVariable;
      myCurrentFlow.removeVariable(psiVariable);
    }
  }

  // Constant value of the expression if it is a compile-time int/long constant; null otherwise.
  @Nullable
  private static Long asLong(PsiExpression expression) {
    Object value = ExpressionUtils.computeConstantExpression(expression);
    if(value instanceof Integer || value instanceof Long) {
      return ((Number)value).longValue();
    }
    return null;
  }

  /**
   * Add known-to-be-true condition inside counting loop, effectively converting
   * {@code for(int i=origin; i<bound; i++)} to
   * {@code int i = origin; while(i < bound) {... i++; if(i <= origin) break;}}.
   * This adds a range knowledge to data flow analysis.
   * <p>
   * Does nothing if the statement is not a counting loop.
   *
   * @param statement counting loop candidate.
   * @return true if the statement was recognized as a counting loop and handled here
   */
  private boolean addCountingLoopBound(PsiForStatement statement) {
    CountingLoop loop = CountingLoop.from(statement);
    if (loop == null || loop.isDescending()) return false;
    PsiLocalVariable counter = loop.getCounter();
    Long start = asLong(loop.getInitializer());
    Long end = asLong(loop.getBound());
    if (loop.isIncluding() && !(PsiType.LONG.equals(counter.getType()) && PsiType.INT.equals(loop.getBound().getType()))) {
      // could be for(int i=0; i<=Integer.MAX_VALUE; i++) which will overflow: conservatively skip this
      if (end == null || end == Long.MAX_VALUE || end == Integer.MAX_VALUE) return false;
    }
    PsiExpression initializer = loop.getInitializer();
    PsiType type = loop.getCounter().getType();
    if (!PsiType.INT.equals(type) && !PsiType.LONG.equals(type)) return false;
    DfaValue origin = null;
    Object initialValue = ExpressionUtils.computeConstantExpression(initializer);
    if (initialValue instanceof Number) {
      origin = myFactory.getConstFactory().createFromValue(initialValue, type, null);
    }
    else if (initializer instanceof PsiReferenceExpression) {
      PsiVariable initialVariable = ObjectUtils.tryCast(((PsiReferenceExpression)initializer).resolve(), PsiVariable.class);
      if ((initialVariable instanceof PsiLocalVariable || initialVariable instanceof PsiParameter)
          && !VariableAccessUtils.variableIsAssigned(initialVariable, statement.getBody())) {
        origin = myFactory.getVarFactory().createVariableValue(initialVariable);
      }
    }
    if (origin == null) return false;
    long diff = start == null || end == null ? -1 : end - start;
    DfaVariableValue loopVar = myFactory.getVarFactory().createVariableValue(counter);
    if(diff >= 0 && diff <= MAX_UNROLL_SIZE) {
      // Unroll small loops
      addInstruction(new PushInstruction(loopVar, null, true));
      addInstruction(new PushInstruction(loopVar, null));
      addInstruction(new PushInstruction(myFactory.getConstFactory().createFromValue(1, PsiType.INT, null), null));
      addInstruction(new BinopInstruction(JavaTokenType.PLUS, null, loopVar.getVariableType()));
      addInstruction(new AssignInstruction(null, null));
      addInstruction(new PopInstruction());
    }
    else if (start != null) {
      long maxValue;
      if (end != null) {
        maxValue = loop.isIncluding() ? end + 1 : end;
      }
      else {
        maxValue = type.equals(PsiType.LONG) ? Long.MAX_VALUE : Integer.MAX_VALUE;
      }
      if (start >= maxValue) {
        // The body cannot execute again: jump straight to the loop end.
        addInstruction(new GotoInstruction(getEndOffset(statement)));
      }
      else {
        // On subsequent iterations the counter is known to be in (start, maxValue].
        DfaValue range = myFactory.getFactValue(DfaFactType.RANGE, LongRangeSet.range(start + 1L, maxValue));
        new CFGBuilder(this).assignAndPop(loopVar, range);
      }
    }
    else {
      // Unknown start: only encode that the counter strictly grew, i.e. leave the loop
      // on states where counter <= origin (cf. "if(i <= origin) break" in the javadoc).
      new CFGBuilder(this).assign(loopVar, DfaUnknownValue.getInstance())
                          .push(origin)
                          .compare(JavaTokenType.LE);
      addInstruction(new ConditionalGotoInstruction(getEndOffset(statement), false, null));
    }
    return true;
  }

  @Override
  public void visitIfStatement(PsiIfStatement statement) {
    startElement(statement);
    PsiExpression condition = statement.getCondition();
    PsiStatement thenStatement = statement.getThenBranch();
    PsiStatement elseStatement = statement.getElseBranch();
    ControlFlowOffset offset = elseStatement != null ?
getStartOffset(elseStatement) : getEndOffset(statement);
    if (condition != null) {
      condition.accept(this);
      generateBoxingUnboxingInstructionFor(condition, PsiType.BOOLEAN);
      // On a false condition, jump to the else branch (or past the statement when absent).
      addInstruction(new ConditionalGotoInstruction(offset, true, condition));
    }
    if (thenStatement != null) {
      thenStatement.accept(this);
    }
    if (elseStatement != null) {
      // After the then-branch, skip over the else-branch.
      offset = getEndOffset(statement);
      Instruction instruction = new GotoInstruction(offset);
      addInstruction(instruction);
      elseStatement.accept(this);
    }
    finishElement(statement);
  }

  // in case of JspTemplateStatement
  @Override
  public void visitStatement(PsiStatement statement) {
    startElement(statement);
    finishElement(statement);
  }

  @Override
  public void visitLabeledStatement(PsiLabeledStatement statement) {
    startElement(statement);
    PsiStatement childStatement = statement.getStatement();
    if (childStatement != null) {
      childStatement.accept(this);
    }
    finishElement(statement);
  }

  @Override
  public void visitLambdaExpression(PsiLambdaExpression expression) {
    startElement(expression);
    DfaValue dfaValue = myFactory.createValue(expression);
    addInstruction(new PushInstruction(dfaValue, expression));
    // Values captured by the lambda escape the currently analyzed scope;
    // the lambda body itself is processed by LambdaInstruction.
    handleEscapedVariables(expression);
    addInstruction(new LambdaInstruction(expression));
    finishElement(expression);
  }

  // Finds DFA values escaping through the given element (a lambda or a local class):
  // captured local variables and the qualifiers ('this' included) of instance-member references.
  private void handleEscapedVariables(PsiElement element) {
    Set<PsiLocalVariable> variables = new HashSet<>();
    Set<DfaVariableValue> escapedVars = new HashSet<>();
    element.accept(new JavaRecursiveElementWalkingVisitor() {
      @Override
      public void visitReferenceExpression(PsiReferenceExpression expression) {
        super.visitReferenceExpression(expression);
        final PsiElement target = expression.resolve();
        if (target instanceof PsiLocalVariable) {
          variables.add((PsiLocalVariable)target);
        }
        if (target instanceof PsiMember && !((PsiMember)target).hasModifierProperty(PsiModifier.STATIC)) {
          DfaVariableValue qualifier = getFactory().getExpressionFactory().getQualifierOrThisVariable(expression);
          if (qualifier != null) {
            escapedVars.add(qualifier);
          }
        }
      }

      @Override
      public void visitThisExpression(PsiThisExpression expression) {
        super.visitThisExpression(expression);
        DfaValue value = getFactory().createValue(expression);
        if (value instanceof DfaVariableValue) {
          escapedVars.add((DfaVariableValue)value);
        }
      }
    });
    // Also mark every already-created (non-negated) DFA variable based on a captured local as escaped.
    for (DfaValue value : getFactory().getValues()) {
      if(value instanceof DfaVariableValue && !((DfaVariableValue)value).isNegated()) {
        PsiModifierListOwner var = ((DfaVariableValue)value).getPsiVariable();
        if (var instanceof PsiLocalVariable && variables.contains(var)) {
          escapedVars.add((DfaVariableValue)value);
        }
      }
    }
    if (!escapedVars.isEmpty()) {
      addInstruction(new EscapeInstruction(escapedVars));
    }
  }

  @Override
  public void visitReturnStatement(PsiReturnStatement statement) {
    startElement(statement);
    PsiExpression returnValue = statement.getReturnValue();
    if (myInlinedBlockContext != null) {
      // Returning from an inlined block: assign the value to the inline target variable and
      // jump to the end of the inlined block instead of emitting a real return.
      if (returnValue != null) {
        DfaVariableValue var = myInlinedBlockContext.myTarget;
        addInstruction(new PushInstruction(var, null, true));
        returnValue.accept(this);
        generateBoxingUnboxingInstructionFor(returnValue, var.getVariableType());
        if (myInlinedBlockContext.myForceNonNullBlockResult) {
          addInstruction(new CheckNotNullInstruction(NullabilityProblemKind.nullableFunctionReturn.problem(returnValue)));
        }
        addInstruction(new AssignInstruction(returnValue, null));
        addInstruction(new PopInstruction());
      }
      controlTransfer(new InstructionTransfer(getEndOffset(myInlinedBlockContext.myCodeBlock),
                                              getVariablesInside(myInlinedBlockContext.myCodeBlock)),
                      getTrapsInsideElement(myInlinedBlockContext.myCodeBlock));
    }
    else {
      if (returnValue != null) {
        returnValue.accept(this);
        // Convert the returned value to the declared return type of the enclosing method
        // (or of the enclosing lambda's functional interface).
        PsiMethod method = PsiTreeUtil.getParentOfType(statement, PsiMethod.class, true, PsiMember.class, PsiLambdaExpression.class);
        if (method != null) {
          generateBoxingUnboxingInstructionFor(returnValue, method.getReturnType());
        }
        else {
          final PsiLambdaExpression lambdaExpression =
            PsiTreeUtil.getParentOfType(statement, PsiLambdaExpression.class, true, PsiMember.class);
          if
(lambdaExpression != null) { generateBoxingUnboxingInstructionFor(returnValue, LambdaUtil.getFunctionalInterfaceReturnType(lambdaExpression)); } } addInstruction(new CheckNotNullInstruction(NullabilityProblemKind.nullableReturn.problem(returnValue))); addInstruction(new PopInstruction()); } addInstruction(new ReturnInstruction(myFactory.controlTransfer(ReturnTransfer.INSTANCE, myTrapStack), statement)); } finishElement(statement); } @Override public void visitSwitchLabelStatement(PsiSwitchLabelStatement statement) { startElement(statement); finishElement(statement); } @Override public void visitSwitchStatement(PsiSwitchStatement switchStmt) { startElement(switchStmt); PsiExpression caseExpression = switchStmt.getExpression(); Set<PsiEnumConstant> enumValues = null; DfaVariableValue expressionValue = null; if (caseExpression != null) { PsiType targetType = caseExpression.getType(); PsiPrimitiveType unboxedType = PsiPrimitiveType.getUnboxedType(targetType); if (unboxedType != null) { targetType = unboxedType; } expressionValue = getFactory().getVarFactory().createVariableValue(new DfaVariableSource() { @Override public boolean isStable() { return true; } @Override public String toString() { return "switch$var"; } }, targetType); addInstruction(new PushInstruction(expressionValue, null, true)); caseExpression.accept(this); generateBoxingUnboxingInstructionFor(caseExpression, targetType); final PsiClass psiClass = PsiUtil.resolveClassInClassTypeOnly(targetType); if (psiClass != null) { addInstruction(new CheckNotNullInstruction(NullabilityProblemKind.fieldAccessNPE.problem(caseExpression))); if (psiClass.isEnum()) { enumValues = new HashSet<>(); for (PsiField f : psiClass.getFields()) { if (f instanceof PsiEnumConstant) { enumValues.add((PsiEnumConstant)f); } } } } addInstruction(new AssignInstruction(null, null)); addInstruction(new PopInstruction()); } PsiCodeBlock body = switchStmt.getBody(); if (body != null) { PsiStatement[] statements = body.getStatements(); 
    // Remainder of visitSwitchStatement: emit per-label comparisons and jumps
    PsiSwitchLabelStatement defaultLabel = null;
    for (PsiStatement statement : statements) {
      if (statement instanceof PsiSwitchLabelStatement) {
        PsiSwitchLabelStatement psiLabelStatement = (PsiSwitchLabelStatement)statement;
        if (psiLabelStatement.isDefaultCase()) {
          defaultLabel = psiLabelStatement;
        }
        else {
          try {
            ControlFlowOffset offset = getStartOffset(statement);
            PsiExpression caseValue = psiLabelStatement.getCaseValue();

            if (enumValues != null && caseValue instanceof PsiReferenceExpression) {
              //noinspection SuspiciousMethodCalls
              enumValues.remove(((PsiReferenceExpression)caseValue).resolve());
            }

            // If every enum constant is now covered, this last label must match
            boolean alwaysTrue = enumValues != null && enumValues.isEmpty();
            if (alwaysTrue) {
              addInstruction(new PushInstruction(myFactory.getConstFactory().getTrue(), null));
            }
            else if (caseValue != null && expressionValue != null) {
              addInstruction(new PushInstruction(expressionValue, null));
              caseValue.accept(this);
              addInstruction(new BinopInstruction(JavaTokenType.EQEQ, null, PsiType.BOOLEAN));
            }
            else {
              pushUnknown();
            }

            addInstruction(new ConditionalGotoInstruction(offset, false, statement));
          }
          catch (IncorrectOperationException e) {
            LOG.error(e);
          }
        }
      }
    }

    if (enumValues == null || !enumValues.isEmpty()) {
      // Not an exhaustive enum switch: fall through to default (or past the body)
      ControlFlowOffset offset = defaultLabel != null ? getStartOffset(defaultLabel) : getEndOffset(body);
      addInstruction(new GotoInstruction(offset));
    }

    body.accept(this);
  }

  if (expressionValue != null) {
    addInstruction(new FlushVariableInstruction(expressionValue));
  }

  finishElement(switchStmt);
}

/**
 * A method reference is not expanded: only its qualifier is evaluated (and dereferenced);
 * the expression itself is modeled as a non-null value of the functional interface type.
 */
@Override
public void visitMethodReferenceExpression(PsiMethodReferenceExpression expression) {
  startElement(expression);
  PsiExpression qualifier = expression.getQualifierExpression();
  if (qualifier != null) {
    qualifier.accept(this);
    addInstruction(new DereferenceInstruction(qualifier));
  }
  addInstruction(new PushInstruction(myFactory.createTypeValue(expression.getFunctionalInterfaceType(), Nullability.NOT_NULL), expression));
  finishElement(expression);
}

/**
 * 'synchronized' dereferences the lock expression and conservatively flushes all
 * fields, since another thread may have modified them before the monitor was acquired.
 */
@Override
public void visitSynchronizedStatement(PsiSynchronizedStatement statement) {
  startElement(statement);
  PsiExpression lock = statement.getLockExpression();
  if (lock != null) {
    lock.accept(this);
    addInstruction(new DereferenceInstruction(lock));
  }

  addInstruction(new FlushFieldsInstruction());

  PsiCodeBlock body = statement.getBody();
  if (body != null) {
    body.accept(this);
  }

  finishElement(statement);
}

/** 'throw' dereferences the exception value and transfers control to the matching trap. */
@Override
public void visitThrowStatement(PsiThrowStatement statement) {
  startElement(statement);

  PsiExpression exception = statement.getException();

  if (exception != null) {
    exception.accept(this);
    addInstruction(new DereferenceInstruction(exception));
    throwException(exception.getType(), statement);
  }

  finishElement(statement);
}

/**
 * Models the possibility that the next instruction throws: a non-deterministic branch
 * that either continues normally or throws RuntimeException/Error. No-op when no
 * enclosing construct would observe the exception.
 */
private void addConditionalRuntimeThrow() {
  if (!shouldHandleException()) {
    return;
  }
  pushUnknown();
  final ConditionalGotoInstruction ifNoException = addInstruction(new ConditionalGotoInstruction(null, false, null));
  pushUnknown();
  final ConditionalGotoInstruction ifError = addInstruction(new ConditionalGotoInstruction(null, false, null));
  throwException(myRuntimeException, null);
  ifError.setOffset(myCurrentFlow.getInstructionCount());
  throwException(myError, null);
  ifNoException.setOffset(myCurrentFlow.getInstructionCount());
}

/** True if some trap on the stack (catch or finally) would observe a thrown exception. */
private boolean shouldHandleException() {
  for (Trap trap : myTrapStack) {
    if (trap instanceof TryCatch || trap instanceof TryFinally || trap instanceof TwrFinally) {
      return true;
    }
  }
  return false;
}

/**
 * Emits CFG for try/catch/finally. Catch sections and the finally block are registered
 * as traps while the guarded code is emitted; normal completion transfers over the
 * catch sections (through the finally, if any). The finally block itself is emitted
 * once and exited via ExitFinallyTransfer.
 */
@Override
public void visitTryStatement(PsiTryStatement statement) {
  startElement(statement);

  PsiResourceList resourceList = statement.getResourceList();
  PsiCodeBlock tryBlock = statement.getTryBlock();
  PsiCodeBlock finallyBlock = statement.getFinallyBlock();

  TryFinally finallyDescriptor = finallyBlock != null ? new TryFinally(finallyBlock, getStartOffset(finallyBlock)) : null;
  if (finallyDescriptor != null) {
    pushTrap(finallyDescriptor);
  }

  PsiCatchSection[] sections = statement.getCatchSections();
  if (sections.length > 0) {
    LinkedHashMap<PsiCatchSection, ControlFlowOffset> clauses = new LinkedHashMap<>();
    for (PsiCatchSection section : sections) {
      PsiCodeBlock catchBlock = section.getCatchBlock();
      if (catchBlock != null) {
        clauses.put(section, getStartOffset(catchBlock));
      }
    }
    pushTrap(new TryCatch(statement, clauses));
  }

  processTryWithResources(resourceList, tryBlock);

  InstructionTransfer gotoEnd = new InstructionTransfer(getEndOffset(statement), getVariablesInside(tryBlock));
  FList<Trap> singleFinally = FList.createFromReversed(ContainerUtil.createMaybeSingletonList(finallyDescriptor));
  controlTransfer(gotoEnd, singleFinally);

  if (sections.length > 0) {
    popTrap(TryCatch.class);
  }

  for (PsiCatchSection section : sections) {
    PsiCodeBlock catchBlock = section.getCatchBlock();
    if (catchBlock != null) {
      visitCodeBlock(catchBlock);
    }
    controlTransfer(gotoEnd, singleFinally);
  }

  if (finallyBlock != null) {
    popTrap(TryFinally.class);
    pushTrap(new InsideFinally(finallyBlock));

    finallyBlock.accept(this);
    controlTransfer(new ExitFinallyTransfer(finallyDescriptor), FList.emptyList());

    popTrap(InsideFinally.class);
  }

  finishElement(statement);
}

void pushTrap(Trap elem) {
  myTrapStack = myTrapStack.prepend(elem);
}

/** Pops the trap stack, asserting that the current head has the expected type. */
void popTrap(Class<? extends Trap> aClass) {
  if (!aClass.isInstance(myTrapStack.getHead())) {
    throw new IllegalStateException("Unexpected trap-stack head (wanted: "+aClass.getSimpleName()+"); stack: "+myTrapStack);
  }
  myTrapStack = myTrapStack.getTail();
}

/**
 * Emits the resource declarations and the try block; when closing the resources may
 * throw, the resource list acts as an implicit finally block that rethrows the
 * closer exceptions.
 */
private void processTryWithResources(@Nullable PsiResourceList resourceList, @Nullable PsiCodeBlock tryBlock) {
  Set<PsiClassType> closerExceptions = Collections.emptySet();
  TwrFinally twrFinallyDescriptor = null;
  if (resourceList != null) {
    resourceList.accept(this);

    closerExceptions = StreamEx.of(resourceList.iterator()).flatCollection(ExceptionUtil::getCloserExceptions).toSet();
    if (!closerExceptions.isEmpty()) {
      twrFinallyDescriptor = new TwrFinally(resourceList, getStartOffset(resourceList));
      pushTrap(twrFinallyDescriptor);
    }
  }

  if (tryBlock != null) {
    tryBlock.accept(this);
  }

  if (twrFinallyDescriptor != null) {
    InstructionTransfer gotoEnd = new InstructionTransfer(getEndOffset(resourceList), getVariablesInside(tryBlock));
    controlTransfer(gotoEnd, FList.createFromReversed(ContainerUtil.createMaybeSingletonList(twrFinallyDescriptor)));
    popTrap(TwrFinally.class);
    pushTrap(new InsideFinally(resourceList));
    startElement(resourceList);
    addThrows(null, closerExceptions.toArray(PsiClassType.EMPTY_ARRAY));
    controlTransfer(new ExitFinallyTransfer(twrFinallyDescriptor), FList.emptyList()); // DfaControlTransferValue is on stack
    finishElement(resourceList);
    popTrap(InsideFinally.class);
  }
}

/** Initializes resource variables; bare resource expressions are evaluated and discarded. */
@Override
public void visitResourceList(PsiResourceList resourceList) {
  for (PsiResourceListElement resource : resourceList) {
    if (resource instanceof PsiResourceVariable) {
      PsiResourceVariable variable = (PsiResourceVariable)resource;
      PsiExpression initializer = variable.getInitializer();
      if (initializer != null) {
        initializeVariable(variable, initializer);
      }
    }
    else if (resource instanceof PsiResourceExpression) {
      ((PsiResourceExpression)resource).getExpression().accept(this);
      addInstruction(new PopInstruction());
    }
  }
}

// (signature continues with visitWhileStatement in the next chunk)
@Override
public void
visitWhileStatement(PsiWhileStatement statement) {
  // CFG: condition, conditional jump out when false, body, goto back to the condition
  startElement(statement);

  PsiExpression condition = statement.getCondition();

  if (condition != null) {
    condition.accept(this);
    generateBoxingUnboxingInstructionFor(condition, PsiType.BOOLEAN);
  }
  else {
    pushUnknown();
  }

  addInstruction(new ConditionalGotoInstruction(getEndOffset(statement), true, condition));

  PsiStatement body = statement.getBody();
  if (body != null) {
    body.accept(this);
  }

  addInstruction(new GotoInstruction(getStartOffset(statement)));

  finishElement(statement);
}

@Override
public void visitExpressionList(PsiExpressionList list) {
  startElement(list);

  PsiExpression[] expressions = list.getExpressions();
  for (PsiExpression expression : expressions) {
    expression.accept(this);
  }

  finishElement(list);
}

/** Fallback for expression kinds with no dedicated visitor: push the expression's value. */
@Override
public void visitExpression(PsiExpression expression) {
  startElement(expression);
  DfaValue dfaValue = myFactory.createValue(expression);
  addInstruction(new PushInstruction(dfaValue, expression));
  finishElement(expression);
}

/** array[index]: evaluates both operands, then emits an ArrayAccessInstruction. */
@Override
public void visitArrayAccessExpression(PsiArrayAccessExpression expression) {
  startElement(expression);

  PsiExpression arrayExpression = expression.getArrayExpression();
  arrayExpression.accept(this);

  PsiExpression indexExpression = expression.getIndexExpression();
  if (indexExpression != null) {
    indexExpression.accept(this);
    generateBoxingUnboxingInstructionFor(indexExpression, PsiType.INT);
  }
  else {
    addInstruction(new PushInstruction(DfaUnknownValue.getInstance(), null));
  }

  DfaValue toPush = myFactory.createValue(expression);
  if (toPush == null) {
    toPush = myFactory.createTypeValue(expression.getType(), Nullability.UNKNOWN);
  }
  addInstruction(new ArrayAccessInstruction(toPush, expression));
  finishElement(expression);
}

/**
 * Returns the variable the given expression is directly written to
 * (variable initializer or right-hand side of a simple '='), or null if there is none.
 */
@Nullable
private DfaVariableValue getTargetVariable(PsiExpression expression) {
  PsiElement parent = PsiUtil.skipParenthesizedExprUp(expression.getParent());
  if (expression instanceof PsiArrayInitializerExpression && parent instanceof PsiNewExpression) {
    parent = PsiUtil.skipParenthesizedExprUp(parent.getParent());
  }
  if (parent instanceof PsiVariable) {
    // initialization
    return getFactory().getVarFactory().createVariableValue((PsiVariable)parent);
  }
  if (parent instanceof PsiAssignmentExpression) {
    PsiAssignmentExpression assignmentExpression = (PsiAssignmentExpression)parent;
    if (assignmentExpression.getOperationTokenType().equals(JavaTokenType.EQ) &&
        PsiTreeUtil.isAncestor(assignmentExpression.getRExpression(), expression, false)) {
      DfaValue value = getFactory().createValue(assignmentExpression.getLExpression());
      if (value instanceof DfaVariableValue) {
        return (DfaVariableValue)value;
      }
    }
  }
  return null;
}

@Override
public void visitArrayInitializerExpression(PsiArrayInitializerExpression expression) {
  startElement(expression);
  initializeArray(expression, expression);
  finishElement(expression);
}

/**
 * Emits CFG for an array initializer. When the target array variable is known and its
 * elements are individually trackable, every element write is modeled precisely;
 * otherwise the elements are merely evaluated (with a nullability check for not-null
 * component types) and discarded. Finally the array length is recorded.
 */
private void initializeArray(PsiArrayInitializerExpression expression, PsiExpression originalExpression) {
  PsiType type = expression.getType();
  PsiType componentType = type instanceof PsiArrayType ? ((PsiArrayType)type).getComponentType() : null;
  DfaVariableValue var = getTargetVariable(expression);
  DfaVariableValue arrayWriteTarget = var;
  if (var == null) {
    var = createTempVariable(type);
  }
  PsiExpression[] initializers = expression.getInitializers();
  DfaExpressionFactory expressionFactory = myFactory.getExpressionFactory();
  if (arrayWriteTarget != null) {
    // Element-precise tracking is only sound for a stable array variable not touched elsewhere
    PsiVariable arrayVariable = ObjectUtils.tryCast(arrayWriteTarget.getPsiVariable(), PsiVariable.class);
    if (arrayWriteTarget.isFlushableByCalls() ||
        arrayVariable == null ||
        VariableAccessUtils.variableIsUsed(arrayVariable, expression) ||
        ExpressionUtils.getConstantArrayElements(arrayVariable) != null ||
        !(expressionFactory.getArrayElementValue(arrayWriteTarget, 0) instanceof DfaVariableValue)) {
      arrayWriteTarget = null;
    }
  }
  DfaValue arrayValue = myFactory.withFact(myFactory.createTypeValue(type, Nullability.NOT_NULL), DfaFactType.LOCALITY, true);
  if (arrayWriteTarget != null) {
    addInstruction(new PushInstruction(arrayWriteTarget, null, true));
    addInstruction(new PushInstruction(arrayValue, expression));
    addInstruction(new AssignInstruction(originalExpression, arrayWriteTarget));
    int index = 0;
    for (PsiExpression initializer : initializers) {
      DfaValue target = Objects.requireNonNull(expressionFactory.getArrayElementValue(arrayWriteTarget, index++));
      addInstruction(new PushInstruction(target, null, true));
      initializer.accept(this);
      if (componentType != null) {
        generateBoxingUnboxingInstructionFor(initializer, componentType);
      }
      addInstruction(new AssignInstruction(initializer, null));
      addInstruction(new PopInstruction());
    }
  }
  else {
    Nullability nullability = Nullability.UNKNOWN;
    if (componentType != null) {
      nullability = DfaPsiUtil.getTypeNullability(componentType);
      if (nullability == Nullability.UNKNOWN && originalExpression != expression) {
        // Fall back to the expected array type of the surrounding context
        PsiType expectedType = ExpectedTypeUtils.findExpectedType(originalExpression, false);
        if (expectedType instanceof PsiArrayType) {
          nullability = DfaPsiUtil.getTypeNullability(((PsiArrayType)expectedType).getComponentType());
        }
      }
    }
    for (PsiExpression initializer : initializers) {
      initializer.accept(this);
      if (componentType != null) {
        generateBoxingUnboxingInstructionFor(initializer, componentType);
        if (nullability == Nullability.NOT_NULL) {
          addInstruction(new CheckNotNullInstruction(NullabilityProblemKind.storingToNotNullArray.problem(initializer)));
        }
      }
      addInstruction(new PopInstruction());
    }
    addInstruction(new PushInstruction(var, null, true));
    addInstruction(new PushInstruction(arrayValue, expression));
    addInstruction(new AssignInstruction(originalExpression, var));
  }
  // Declaration: write array length
  DfaConstValue lengthValue = getFactory().getInt(expression.getInitializers().length);
  new CFGBuilder(this).assignAndPop(SpecialField.ARRAY_LENGTH.createValue(getFactory(), var), lengthValue);
}

/**
 * Dispatches polyadic expressions: logical &&/|| (short-circuit), boolean &, |, ^
 * (non-short-circuit), integer division/remainder (with zero-divisor check), or the
 * generic left-to-right binop chain.
 */
@Override
public void visitPolyadicExpression(PsiPolyadicExpression expression) {
  startElement(expression);
  DfaValue dfaValue = myFactory.createValue(expression);
  if (dfaValue != null) {
    // The whole expression folds to a known value
    addInstruction(new PushInstruction(dfaValue, expression));
    finishElement(expression);
    return;
  }
  IElementType op = expression.getOperationTokenType();
  PsiExpression[] operands = expression.getOperands();
  if (operands.length <= 1) {
    pushUnknown();
    finishElement(expression);
    return;
  }
  PsiType type = expression.getType();
  if (op == JavaTokenType.ANDAND) {
    generateAndOrExpression(expression, operands, type, true, true);
  }
  else if (op == JavaTokenType.OROR) {
    generateAndOrExpression(expression, operands, type, false, true);
  }
  else if (op == JavaTokenType.XOR && PsiType.BOOLEAN.equals(type)) {
    generateXorExpression(expression, operands, type, false);
  }
  else if (op == JavaTokenType.AND && PsiType.BOOLEAN.equals(type)) {
    generateAndOrExpression(expression, operands, type, true, false);
  }
  else if (op == JavaTokenType.OR && PsiType.BOOLEAN.equals(type)) {
    generateAndOrExpression(expression, operands, type, false, false);
  }
  else if
(isBinaryDivision(op) && operands.length == 2 &&
           type != null && PsiType.LONG.isAssignableFrom(type)) {
    generateDivMod(expression, type, operands[0], operands[1]);
  }
  else {
    generateOther(expression, op, operands, type);
  }
  finishElement(expression);
}

/** True for integer '/' and '%', which may throw ArithmeticException on a zero divisor. */
static boolean isBinaryDivision(IElementType binaryOp) {
  return binaryOp == JavaTokenType.DIV || binaryOp == JavaTokenType.PERC;
}

/** True for the compound-assignment forms '/=' and '%='. */
static boolean isAssignmentDivision(IElementType op) {
  return op == JavaTokenType.PERCEQ || op == JavaTokenType.DIVEQ;
}

/** Emits both operands, a zero-divisor check, then the div/mod binop itself. */
private void generateDivMod(PsiPolyadicExpression expression, PsiType type, PsiExpression left, PsiExpression right) {
  left.accept(this);
  generateBoxingUnboxingInstructionFor(left, type);
  right.accept(this);
  generateBoxingUnboxingInstructionFor(right, type);
  checkZeroDivisor();
  addInstruction(new BinopInstruction(expression.getOperationTokenType(), expression.isPhysical() ? expression : null, type));
}

/** Branches on "divisor != 0"; the zero branch throws ArithmeticException. */
private void checkZeroDivisor() {
  addInstruction(new DupInstruction());
  addInstruction(new PushInstruction(myFactory.getConstFactory().createFromValue(0, PsiType.LONG, null), null));
  addInstruction(new BinopInstruction(JavaTokenType.NE, null, PsiType.BOOLEAN));
  ConditionalGotoInstruction ifNonZero = new ConditionalGotoInstruction(null, false, null);
  addInstruction(ifNonZero);
  throwException(JavaPsiFacade.getElementFactory(myProject).createTypeByFQClassName(ArithmeticException.class.getName()), null);
  ifNonZero.setOffset(myCurrentFlow.getInstructionCount());
}

/** Generic left-associative chain: ((a op b) op c) op d ... */
private void generateOther(PsiPolyadicExpression expression, IElementType op, PsiExpression[] operands, PsiType type) {
  op = substituteBinaryOperation(expression, op);
  PsiExpression lExpr = operands[0];
  lExpr.accept(this);
  PsiType lType = lExpr.getType();
  for (int i = 1; i < operands.length; i++) {
    PsiExpression rExpr = operands[i];
    PsiType rType = rExpr.getType();

    acceptBinaryRightOperand(op, type, lExpr, lType, rExpr, rType);
    addInstruction(new BinopInstruction(op, expression.isPhysical() ? expression : null, type, i));

    lExpr = rExpr;
    lType = rType;
  }
}

/**
 * Degrades '+'/'-' to an unknown operation (returns null) in contexts where tracking
 * exact arithmetic is not considered useful (see isAcceptableContextForMathOperation);
 * string concatenation is always kept.
 */
@Nullable
private IElementType substituteBinaryOperation(PsiPolyadicExpression expression, IElementType op) {
  if (JavaTokenType.PLUS == op) {
    if (TypeUtils.isJavaLangString(expression.getType()) || isAcceptableContextForMathOperation(expression)) return op;
    return null;
  }
  if (JavaTokenType.MINUS == op && !isAcceptableContextForMathOperation(expression)) return null;
  return op;
}

/**
 * Heuristic: arithmetic is worth tracking when used as an argument, array
 * index/initializer, or inside a comparison — but not when it merely accumulates
 * across loop iterations (a for-statement's initialization section is still fine).
 */
private boolean isAcceptableContextForMathOperation(PsiExpression expression) {
  PsiElement parent = expression.getParent();
  while (parent != null && parent != myCodeFragment) {
    if (parent instanceof PsiExpressionList ||
        parent instanceof PsiArrayInitializerExpression ||
        parent instanceof PsiArrayAccessExpression) {
      return true;
    }
    if (parent instanceof PsiBinaryExpression &&
        RelationType.fromElementType(((PsiBinaryExpression)parent).getOperationTokenType()) != null) {
      return true;
    }
    if (parent instanceof PsiLoopStatement &&
        !(parent instanceof PsiForStatement &&
          PsiTreeUtil.isAncestor(((PsiForStatement)parent).getInitialization(), expression, false))) {
      return false;
    }
    parent = parent.getParent();
  }
  return true;
}

/**
 * Emits the right operand of a binop, applying boxing/unboxing conversions.
 * Reference ==/!= compares as-is; primitive numeric ==/!= converts both sides to the
 * binary-numeric-promotion type (double or long).
 */
private void acceptBinaryRightOperand(@Nullable IElementType op, PsiType type,
                                      PsiExpression lExpr, PsiType lType,
                                      PsiExpression rExpr, PsiType rType) {
  boolean comparing = op == JavaTokenType.EQEQ || op == JavaTokenType.NE;
  boolean comparingRef = comparing &&
                         !TypeConversionUtil.isPrimitiveAndNotNull(lType) &&
                         !TypeConversionUtil.isPrimitiveAndNotNull(rType);

  boolean comparingPrimitiveNumeric = comparing &&
                                      TypeConversionUtil.isPrimitiveAndNotNull(lType) &&
                                      TypeConversionUtil.isPrimitiveAndNotNull(rType) &&
                                      TypeConversionUtil.isNumericType(lType) &&
                                      TypeConversionUtil.isNumericType(rType);

  PsiType castType = comparingPrimitiveNumeric ? TypeConversionUtil.isFloatOrDoubleType(lType) ? PsiType.DOUBLE : PsiType.LONG : type;

  if (!comparingRef) {
    generateBoxingUnboxingInstructionFor(lExpr,castType);
  }

  rExpr.accept(this);
  if (!comparingRef) {
    generateBoxingUnboxingInstructionFor(rExpr, castType);
  }
}

void generateBoxingUnboxingInstructionFor(@NotNull PsiExpression expression, PsiType expectedType) {
  generateBoxingUnboxingInstructionFor(expression, expression.getType(), expectedType);
}

/**
 * Inserts an unboxing, boxing, or primitive-cast conversion when the actual and
 * expected types differ. Boxing is preceded by a conditional runtime throw.
 */
void generateBoxingUnboxingInstructionFor(@NotNull PsiExpression context, PsiType actualType, PsiType expectedType) {
  if (PsiType.VOID.equals(expectedType)) return;

  if (TypeConversionUtil.isPrimitiveAndNotNull(expectedType) && TypeConversionUtil.isPrimitiveWrapper(actualType)) {
    addInstruction(new MethodCallInstruction(context, MethodType.UNBOXING, expectedType));
  }
  else if (TypeConversionUtil.isPrimitiveAndNotNull(actualType) && TypeConversionUtil.isAssignableFromPrimitiveWrapper(expectedType)) {
    addConditionalRuntimeThrow();
    addInstruction(new MethodCallInstruction(context, MethodType.BOXING, expectedType));
  }
  else if (actualType != expectedType &&
           TypeConversionUtil.isPrimitiveAndNotNull(actualType) &&
           TypeConversionUtil.isPrimitiveAndNotNull(expectedType) &&
           TypeConversionUtil.isNumericType(actualType) &&
           TypeConversionUtil.isNumericType(expectedType)) {
    addInstruction(new MethodCallInstruction(context, MethodType.CAST, expectedType) {
      @Override
      public DfaInstructionState[] accept(DataFlowRunner runner, DfaMemoryState stateBefore, InstructionVisitor visitor) {
        return visitor.visitCast(this, runner, stateBefore);
      }
    });
  }
}

/** Boolean '^' chain: each step is modeled as '!=' applied to the accumulated result. */
private void generateXorExpression(PsiExpression expression, PsiExpression[] operands, final PsiType exprType, boolean forAssignment) {
  PsiExpression operand = operands[0];
  operand.accept(this);
  if (forAssignment) {
    // Keep a copy of the original value for the compound assignment
    addInstruction(new DupInstruction());
  }
  generateBoxingUnboxingInstructionFor(operand, exprType);
  for (int i = 1; i < operands.length; i++) {
    operand = operands[i];
    operand.accept(this);
    generateBoxingUnboxingInstructionFor(operand, exprType);
    PsiExpression psiAnchor = expression.isPhysical() ? expression : null;
    addInstruction(new BinopInstruction(JavaTokenType.NE, psiAnchor, exprType, i));
  }
}

/** Non-short-circuit evaluation used by compound boolean assignments (&=, |=). */
private void generateBooleanAssignmentExpression(boolean and, PsiExpression lExpression, PsiExpression rExpression, PsiType exprType) {
  lExpression.accept(this);
  generateBoxingUnboxingInstructionFor(lExpression, exprType);
  addInstruction(new DupInstruction());

  rExpression.accept(this);
  generateBoxingUnboxingInstructionFor(rExpression, exprType);
  addInstruction(new SwapInstruction());

  combineStackBooleans(and, lExpression);
}

/**
 * Combines the two topmost boolean stack values with non-short-circuit AND/OR:
 * if the first value already decides the result, pop the second and push the constant.
 */
private void combineStackBooleans(boolean and, PsiExpression anchor) {
  ConditionalGotoInstruction toPopAndPushSuccess = new ConditionalGotoInstruction(null, and, anchor);
  addInstruction(toPopAndPushSuccess);
  GotoInstruction overPushSuccess = new GotoInstruction(null);
  addInstruction(overPushSuccess);

  PopInstruction pop = new PopInstruction();
  addInstruction(pop);
  DfaConstValue constValue = myFactory.getBoolean(!and);
  PushInstruction pushSuccess = new PushInstruction(constValue, null);
  addInstruction(pushSuccess);

  toPopAndPushSuccess.setOffset(pop.getIndex());
  overPushSuccess.setOffset(pushSuccess.getIndex() + 1);
}

/**
 * Emits an &&/||/&/| chain. In short-circuit mode each operand that decides the
 * result jumps to the end with the deciding constant; otherwise operands are combined
 * pairwise on the stack.
 */
private void generateAndOrExpression(PsiExpression expression, PsiExpression[] operands, final PsiType exprType, boolean and, boolean shortCircuit) {
  for (int i = 0; i < operands.length; i++) {
    PsiExpression operand = operands[i];
    operand.accept(this);
    generateBoxingUnboxingInstructionFor(operand, exprType);
    if (!shortCircuit) {
      if (i > 0) {
        combineStackBooleans(and, operand);
      }
      continue;
    }

    PsiExpression nextOperand = i == operands.length - 1 ? null : operands[i + 1];
    if (nextOperand != null) {
      addInstruction(new ConditionalGotoInstruction(getStartOffset(nextOperand), !and, operand));
      addInstruction(new PushInstruction(myFactory.getBoolean(!and), expression));
      addInstruction(new GotoInstruction(getEndOffset(operands[operands.length - 1])));
    }
  }
  if (shortCircuit) {
    addInstruction(new ResultOfInstruction(expression));
  }
}

/** Foo.class literal: push it as a constant value. */
@Override
public void visitClassObjectAccessExpression(PsiClassObjectAccessExpression expression) {
  startElement(expression);
  PsiTypeElement operand = expression.getOperand();
  DfaConstValue classConstant = myFactory.getConstFactory().createFromValue(operand.getType(), expression.getType(), null);
  addInstruction(new PushInstruction(classConstant, expression));
  finishElement(expression);
}

/**
 * Ternary 'cond ? a : b': the condition jumps to the else-expression when false;
 * both branches are box/unbox-converted to the ternary's type.
 */
@Override
public void visitConditionalExpression(PsiConditionalExpression expression) {
  startElement(expression);

  PsiExpression condition = expression.getCondition();
  PsiExpression thenExpression = expression.getThenExpression();
  PsiExpression elseExpression = expression.getElseExpression();

  // With no else-expression, jump just before the end so an unknown is still pushed there
  final ControlFlowOffset elseOffset = elseExpression == null ?
ControlFlow.deltaOffset(getEndOffset(expression), -1) : getStartOffset(elseExpression);
  if (thenExpression != null) {
    condition.accept(this);
    generateBoxingUnboxingInstructionFor(condition, PsiType.BOOLEAN);
    PsiType type = expression.getType();
    addInstruction(new ConditionalGotoInstruction(elseOffset, true, PsiUtil.skipParenthesizedExprDown(condition)));
    thenExpression.accept(this);
    generateBoxingUnboxingInstructionFor(thenExpression,type);

    addInstruction(new GotoInstruction(getEndOffset(expression)));

    if (elseExpression != null) {
      elseExpression.accept(this);
      generateBoxingUnboxingInstructionFor(elseExpression,type);
    }
    else {
      pushUnknown();
    }
  }
  else {
    pushUnknown();
  }

  finishElement(expression);
}

/** Pushes an unknown value (used wherever the analysis cannot track an exact result). */
void pushUnknown() {
  addInstruction(new PushInstruction(DfaUnknownValue.getInstance(), null));
}

/** 'x instanceof T': pushes the operand and the raw checked type, then InstanceofInstruction. */
@Override
public void visitInstanceOfExpression(PsiInstanceOfExpression expression) {
  startElement(expression);
  PsiExpression operand = expression.getOperand();
  PsiTypeElement checkType = expression.getCheckType();
  if (checkType != null) {
    operand.accept(this);
    PsiType type = checkType.getType();
    if (type instanceof PsiClassType) {
      type = ((PsiClassType)type).rawType();
    }
    addInstruction(new PushInstruction(myFactory.createTypeValue(type, Nullability.NOT_NULL), null));
    addInstruction(new InstanceofInstruction(expression, operand, type));
  }
  else {
    pushUnknown();
  }

  finishElement(expression);
}

/** Emits conditional throws for every exception type declared by the method. */
private void addMethodThrows(PsiMethod method, @Nullable PsiElement explicitCall) {
  if (method != null) {
    addThrows(explicitCall, method.getThrowsList().getReferencedTypes());
  }
}

private void addThrows(@Nullable PsiElement explicitCall, PsiClassType[] refs) {
  for (PsiClassType ref : refs) {
    pushUnknown();
    ConditionalGotoInstruction cond = new ConditionalGotoInstruction(null, false, null);
    addInstruction(cond);
    throwException(ref, explicitCall);
    cond.setOffset(myCurrentFlow.getInstructionCount());
  }
}

void throwException(@Nullable PsiType ref, @Nullable PsiElement anchor) {
  if (ref != null) {
    throwException(new ExceptionTransfer(myFactory.createDfaType(ref)), anchor);
  }
}

/** Unwinds through the current trap stack with the given exception transfer. */
private void throwException(ExceptionTransfer kind, @Nullable PsiElement anchor) {
  addInstruction(new ReturnInstruction(myFactory.controlTransfer(kind, myTrapStack), anchor));
}

/**
 * Handles a (possibly chained) call: walks down the qualifier chain iteratively to
 * avoid deep recursion, trying inliners at each level, then finishes the collected
 * calls in source (innermost-first) order.
 */
@Override
public void visitMethodCallExpression(PsiMethodCallExpression call) {
  ArrayDeque<PsiMethodCallExpression> calls = new ArrayDeque<>();
  while (true) {
    calls.addFirst(call);
    startElement(call);
    if (tryInline(call)) {
      finishElement(call);
      calls.removeFirst();
      break;
    }

    PsiExpression qualifierExpression = call.getMethodExpression().getQualifierExpression();
    if (qualifierExpression == null) {
      // Implicit qualifier: push 'this' (or the relevant outer instance) if available
      DfaValue thisVariable = myFactory.getExpressionFactory().getQualifierOrThisVariable(call.getMethodExpression());
      if (thisVariable != null) {
        addInstruction(new PushInstruction(thisVariable, null));
      }
      else {
        pushUnknown();
      }
      break;
    }
    call = ObjectUtils.tryCast(PsiUtil.skipParenthesizedExprDown(qualifierExpression), PsiMethodCallExpression.class);
    if (call == null) {
      qualifierExpression.accept(this);
      break;
    }
  }
  calls.forEach(this::finishCall);
}

/** Tries the registered call inliners; returns true if one of them produced the CFG. */
private boolean tryInline(PsiMethodCallExpression call) {
  if (myInlining) {
    for (CallInliner inliner: INLINERS) {
      if (inliner.tryInlineCall(new CFGBuilder(this), call)) {
        return true;
      }
    }
  }
  return false;
}

/** Evaluates the arguments (with conversions to the parameter types) and emits the call. */
private void finishCall(PsiMethodCallExpression call) {
  PsiExpression[] expressions = call.getArgumentList().getExpressions();

  JavaResolveResult result = call.getMethodExpression().advancedResolve(false);
  PsiElement method = result.getElement();
  PsiParameter[] parameters = method instanceof PsiMethod ? ((PsiMethod)method).getParameterList().getParameters() : null;

  for (int i = 0; i < expressions.length; i++) {
    PsiExpression paramExpr = expressions[i];
    paramExpr.accept(this);
    if (parameters != null && i < parameters.length) {
      generateBoxingUnboxingInstructionFor(paramExpr, result.getSubstitutor().substitute(parameters[i].getType()));
    }
  }

  addBareCall(call, call.getMethodExpression());
  finishElement(call);
}

/**
 * Emits the MethodCallInstruction itself (qualifier and arguments are already on the
 * stack), applying contracts: a 'fail' contract value becomes an exception transfer,
 * and declared exceptions are modeled when an enclosing trap may observe them.
 */
void addBareCall(@Nullable PsiMethodCallExpression expression, @NotNull PsiReferenceExpression reference) {
  addConditionalRuntimeThrow();
  PsiMethod method = ObjectUtils.tryCast(reference.resolve(), PsiMethod.class);
  List<? extends MethodContract> contracts =
    method == null ? Collections.emptyList() : JavaMethodContractUtil.getMethodCallContracts(method, expression);
  MethodCallInstruction instruction;
  PsiExpression anchor;
  if (expression == null) {
    assert reference instanceof PsiMethodReferenceExpression;
    instruction = new MethodCallInstruction((PsiMethodReferenceExpression)reference, contracts);
    anchor = reference;
  }
  else {
    instruction = new MethodCallInstruction(expression, myFactory.createValue(expression), contracts);
    anchor = expression;
  }
  addInstruction(instruction);
  if (contracts.stream().anyMatch(c -> c.getReturnValue().isFail())) {
    // if a contract resulted in 'fail', handle it
    addInstruction(new DupInstruction());
    addInstruction(new PushInstruction(myFactory.getConstFactory().getContractFail(), null));
    addInstruction(new BinopInstruction(JavaTokenType.EQEQ, null, PsiType.BOOLEAN));
    ConditionalGotoInstruction ifNotFail = new ConditionalGotoInstruction(null, true, null);
    addInstruction(ifNotFail);
    addInstruction(new ReturnInstruction(myFactory.controlTransfer(new ExceptionTransfer(null), myTrapStack), anchor));
    ifNotFail.setOffset(myCurrentFlow.getInstructionCount());
  }

  if (shouldHandleException()) {
    addMethodThrows(method, anchor);
  }
}

/**
 * @deprecated use {@link JavaMethodContractUtil#findContractAnnotation(PsiMethod)}.
 */
@Deprecated
@Nullable
public static PsiAnnotation findContractAnnotation(@NotNull PsiMethod method) {
  return JavaMethodContractUtil.findContractAnnotation(method);
}

/** Models an enum constant initializer as a constructor call whose result is discarded. */
@Override
public void visitEnumConstant(PsiEnumConstant enumConstant) {
  if (enumConstant.getArgumentList() == null) return;

  pushUnknown();
  pushConstructorArguments(enumConstant);
  addInstruction(new MethodCallInstruction(enumConstant, null, Collections.emptyList()));
  addInstruction(new PopInstruction());
}

/**
 * Emits CFG for 'new': array creation tracks the resulting length; other constructor
 * calls evaluate qualifier and arguments, model variables escaping into an anonymous
 * class body, and apply constructor contracts.
 */
@Override
public void visitNewExpression(PsiNewExpression expression) {
  startElement(expression);

  PsiExpression qualifier = expression.getQualifier();
  if (qualifier != null) {
    qualifier.accept(this);
    addInstruction(new CheckNotNullInstruction(NullabilityProblemKind.innerClassNPE.problem(expression)));
    addInstruction(new PopInstruction());
  }

  PsiType type = expression.getType();
  if (type instanceof PsiArrayType) {
    PsiArrayInitializerExpression arrayInitializer = expression.getArrayInitializer();
    if (arrayInitializer != null) {
      initializeArray(arrayInitializer, expression);
      return;
    }
    DfaVariableValue var = getTargetVariable(expression);
    if (var == null) {
      var = createTempVariable(type);
    }
    DfaValue length = SpecialField.ARRAY_LENGTH.createValue(getFactory(), var);
    addInstruction(new PushInstruction(length, null, true)); // stack: ... var.length
    final PsiExpression[] dimensions = expression.getArrayDimensions();
    if (dimensions.length > 0) {
      // Only the first dimension determines var.length; inner dimensions are popped
      boolean sizeOnStack = false;
      for (final PsiExpression dimension : dimensions) {
        dimension.accept(this);
        if (sizeOnStack) {
          addInstruction(new PopInstruction());
        }
        sizeOnStack = true;
      }
    }
    else {
      pushUnknown();
    }
    // stack: ... var.length actual_size
    addInstruction(new PushInstruction(var, null, true));
    DfaValue arrayValue = myFactory.withFact(myFactory.createExactTypeValue(type), DfaFactType.LOCALITY, true);
    addInstruction(new PushInstruction(arrayValue, expression));
    addInstruction(new AssignInstruction(expression, var));
    // stack: ... var.length actual_size var
    addInstruction(new SpliceInstruction(3, 0, 2, 1));
    // stack: ... var var.length actual_size
    addInstruction(new AssignInstruction(null, length));
    addInstruction(new PopInstruction());
    // stack: ... var
    initializeSmallArray((PsiArrayType)type, var, dimensions);
  }
  else {
    pushUnknown(); // qualifier
    PsiMethod constructor = pushConstructorArguments(expression);
    PsiAnonymousClass anonymousClass = expression.getAnonymousClass();
    if (anonymousClass != null) {
      handleEscapedVariables(anonymousClass);
    }

    addConditionalRuntimeThrow();
    addInstruction(new MethodCallInstruction(expression, null, constructor == null ? Collections.emptyList() : JavaMethodContractUtil.getMethodContracts(constructor)));

    if (shouldHandleException()) {
      addMethodThrows(constructor, expression);
    }

    setEmptyCollectionSize(expression);
  }

  finishElement(expression);
}

// NOTE(review): this method continues beyond the visible chunk; the ternary below is
// completed (and the method closed) in the following part of the file.
private void setEmptyCollectionSize(PsiNewExpression expression) {
  DfaVariableValue var = getTargetVariable(expression);
  if (var != null && ConstructionUtils.isEmptyCollectionInitializer(expression)) {
    DfaValue collectionValue = myFactory.withFact(myFactory.createTypeValue(expression.getType(), Nullability.NOT_NULL), DfaFactType.LOCALITY, true);
    SpecialField sizeField = InheritanceUtil.isInheritor(expression.getType(), JAVA_UTIL_MAP) ?
SpecialField.MAP_SIZE : SpecialField.COLLECTION_SIZE; new CFGBuilder(this).pop() .assign(var, collectionValue) .assignAndPop(sizeField.createValue(myFactory, var), myFactory.getInt(0)); } } private void initializeSmallArray(PsiArrayType type, DfaVariableValue var, PsiExpression[] dimensions) { if (dimensions.length != 1) return; PsiType componentType = type.getComponentType(); // Ignore objects as they may produce false NPE warnings due to non-perfect loop handling if (!(componentType instanceof PsiPrimitiveType)) return; Object val = ExpressionUtils.computeConstantExpression(dimensions[0]); if (val instanceof Integer) { int lengthValue = (Integer)val; if (lengthValue > 0 && lengthValue <= MAX_UNROLL_SIZE) { for (int i = 0; i < lengthValue; i++) { DfaValue value = getFactory().getExpressionFactory().getArrayElementValue(var, i); addInstruction(new PushInstruction(value, null, true)); } addInstruction(new PushInstruction(getFactory().getConstFactory().createDefault(componentType), null)); for (int i = lengthValue - 1; i >= 0; i--) { DfaValue value = getFactory().getExpressionFactory().getArrayElementValue(var, i); addInstruction(new AssignInstruction(null, value)); } addInstruction(new PopInstruction()); } } } @Nullable private PsiMethod pushConstructorArguments(PsiConstructorCall call) { PsiExpressionList args = call.getArgumentList(); PsiMethod ctr = call.resolveConstructor(); if (args != null) { PsiExpression[] params = args.getExpressions(); PsiParameter[] parameters = ctr == null ? 
null : ctr.getParameterList().getParameters(); for (int i = 0; i < params.length; i++) { PsiExpression param = params[i]; param.accept(this); if (parameters != null && i < parameters.length) { generateBoxingUnboxingInstructionFor(param, parameters[i].getType()); } } } return ctr; } @Override public void visitParenthesizedExpression(PsiParenthesizedExpression expression) { startElement(expression); PsiExpression inner = expression.getExpression(); if (inner != null) { inner.accept(this); } else { pushUnknown(); } finishElement(expression); } @Override public void visitPostfixExpression(PsiPostfixExpression expression) { startElement(expression); PsiExpression operand = PsiUtil.skipParenthesizedExprDown(expression.getOperand()); if (operand != null) { operand.accept(this); generateBoxingUnboxingInstructionFor(operand, PsiType.INT); pushUnknown(); addInstruction(new AssignInstruction(operand, null, myFactory.createValue(operand))); addInstruction(new PopInstruction()); } pushUnknown(); finishElement(expression); } @Override public void visitPrefixExpression(PsiPrefixExpression expression) { startElement(expression); DfaValue dfaValue = expression.getOperationTokenType() == JavaTokenType.EXCL ? null : myFactory.createValue(expression); if (dfaValue != null) { // Constant expression is computed: just push the result addInstruction(new PushInstruction(dfaValue, expression)); } else { PsiExpression operand = PsiUtil.skipParenthesizedExprDown(expression.getOperand()); if (operand == null) { pushUnknown(); } else { operand.accept(this); PsiType type = expression.getType(); PsiPrimitiveType unboxed = PsiPrimitiveType.getUnboxedType(type); generateBoxingUnboxingInstructionFor(operand, unboxed == null ? 
type : unboxed); if (PsiUtil.isIncrementDecrementOperation(expression)) { pushUnknown(); addInstruction(new AssignInstruction(operand, null, myFactory.createValue(operand))); } else if (expression.getOperationTokenType() == JavaTokenType.EXCL) { addInstruction(new NotInstruction(expression)); } else if (expression.getOperationTokenType() == JavaTokenType.MINUS && (PsiType.INT.equals(type) || PsiType.LONG.equals(type))) { addInstruction(new PushInstruction(myFactory.getConstFactory().createDefault(type), null)); addInstruction(new SwapInstruction()); addInstruction(new BinopInstruction(expression.getOperationTokenType(), expression, type)); } else { addInstruction(new PopInstruction()); pushUnknown(); } } } finishElement(expression); } @Override public void visitReferenceExpression(PsiReferenceExpression expression) { startElement(expression); final PsiExpression qualifierExpression = expression.getQualifierExpression(); if (qualifierExpression != null) { PsiElement target = expression.resolve(); if (!(target instanceof PsiMember) || !((PsiMember)target).hasModifierProperty(PsiModifier.STATIC)) { qualifierExpression.accept(this); addInstruction(target instanceof PsiField ? new DereferenceInstruction(qualifierExpression) : new PopInstruction()); } } // complex assignments (e.g. 
"|=") are both reading and writing boolean writing = PsiUtil.isAccessedForWriting(expression) && !PsiUtil.isAccessedForReading(expression); addInstruction(new PushInstruction(myFactory.createValue(expression), expression, writing)); finishElement(expression); } @Override public void visitLiteralExpression(PsiLiteralExpression expression) { startElement(expression); DfaValue dfaValue = myFactory.createLiteralValue(expression); addInstruction(new PushInstruction(dfaValue, expression)); finishElement(expression); } @Override public void visitTypeCastExpression(PsiTypeCastExpression castExpression) { startElement(castExpression); PsiExpression operand = castExpression.getOperand(); if (operand != null) { operand.accept(this); generateBoxingUnboxingInstructionFor(operand, castExpression.getType()); } else { addInstruction(new PushInstruction(myFactory.createTypeValue(castExpression.getType(), Nullability.UNKNOWN), null)); } final PsiTypeElement typeElement = castExpression.getCastType(); if (typeElement != null && operand != null && operand.getType() != null) { if (typeElement.getType() instanceof PsiPrimitiveType && !UnnecessaryExplicitNumericCastInspection.isUnnecessaryPrimitiveNumericCast(castExpression)) { addInstruction(new PopInstruction()); pushUnknown(); } else { addInstruction(new TypeCastInstruction(castExpression, operand, typeElement.getType())); } } finishElement(castExpression); } @Override public void visitClass(PsiClass aClass) { } /** * Inline code block (lambda or method body) into this CFG. 
Incoming parameters are assumed to be handled already (if necessary) * * @param block block to inline * @param resultNullability desired nullability returned by block return statement * @param target a variable to store the block result (returned via {@code return} statement) */ void inlineBlock(@NotNull PsiCodeBlock block, @NotNull Nullability resultNullability, @NotNull DfaVariableValue target) { InlinedBlockContext oldBlock = myInlinedBlockContext; // Transfer value is pushed to avoid emptying stack beyond this point pushTrap(new Trap.InsideInlinedBlock(block)); addInstruction(new PushInstruction(myFactory.controlTransfer(ReturnTransfer.INSTANCE, FList.emptyList()), null)); myInlinedBlockContext = new InlinedBlockContext(block, resultNullability == Nullability.NOT_NULL, target); startElement(block); block.accept(this); finishElement(block); myInlinedBlockContext = oldBlock; popTrap(Trap.InsideInlinedBlock.class); // Pop transfer value addInstruction(new PopInstruction()); } /** * Create a synthetic variable (not declared in the original code) to be used within this control flow. * * @param type a type of variable to create * @return newly created variable */ @NotNull DfaVariableValue createTempVariable(@Nullable PsiType type) { if(type == null) { type = PsiType.VOID; } return getFactory().getVarFactory().createVariableValue(new Synthetic(getInstructionCount()), type); } /** * Checks whether supplied variable is a temporary variable created previously via {@link #createTempVariable(PsiType)} * * @param variable to check * @return true if supplied variable is a temp variable. 
*/ public static boolean isTempVariable(@NotNull DfaVariableValue variable) { return variable.getSource() instanceof Synthetic; } /** * @param expression expression to test * @return true if some inliner may add constraints on the precise type of given expression */ public static boolean inlinerMayInferPreciseType(PsiExpression expression) { return Arrays.stream(INLINERS).anyMatch(inliner -> inliner.mayInferPreciseType(expression)); } private static final class Synthetic implements DfaVariableSource { private final int myLocation; private Synthetic(int location) { myLocation = location; } @NotNull @Override public String toString() { return "tmp$" + myLocation; } @Override public boolean isStable() { return true; } } public static class InlinedBlockContext { final PsiCodeBlock myCodeBlock; final boolean myForceNonNullBlockResult; final DfaVariableValue myTarget; public InlinedBlockContext(PsiCodeBlock codeBlock, boolean forceNonNullBlockResult, DfaVariableValue target) { myCodeBlock = codeBlock; myForceNonNullBlockResult = forceNonNullBlockResult; myTarget = target; } } static final CallInliner[] INLINERS = {new OptionalChainInliner(), new LambdaInliner(), new CollectionFactoryInliner(), new StreamChainInliner(), new MapUpdateInliner(), new AssumeInliner(), new ClassMethodsInliner(), new AssertAllInliner()}; }
package ca.uhn.fhir.model.primitive; import static org.junit.jupiter.api.Assertions.*; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.model.dstu.composite.CodingDt; import ca.uhn.fhir.util.TestUtil; public class CodingDtTest { private static FhirContext ourCtx = FhirContext.forDstu1(); @Test public void testTokenNoSystem() { CodingDt dt = new CodingDt(); dt.setValueAsQueryToken(ourCtx, null, null, "c"); assertEquals(null, dt.getSystem().getValueAsString()); assertEquals("c", dt.getCode().getValue()); assertEquals("c", dt.getValueAsQueryToken(ourCtx)); } @Test public void testTokenWithPipeInValue() { CodingDt dt = new CodingDt(); dt.setValueAsQueryToken(ourCtx, null, null, "a|b|c"); assertEquals("a", dt.getSystem().getValueAsString()); assertEquals("b|c", dt.getCode().getValue()); assertEquals("a|b\\|c", dt.getValueAsQueryToken(ourCtx)); } @Test public void testTokenWithPipeInValueAndNoSystem() { CodingDt dt = new CodingDt(); dt.setValueAsQueryToken(ourCtx, null, null, "|b\\|c"); assertEquals("", dt.getSystem().getValueAsString()); assertEquals("b|c", dt.getCode().getValue()); assertEquals("|b\\|c", dt.getValueAsQueryToken(ourCtx)); } /** * Technically the second pipe should have been escaped.. But we should be nice about it */ @Test public void testTokenWithPipeInValueAndNoSystemAndBeLenient() { CodingDt dt = new CodingDt(); dt.setValueAsQueryToken(ourCtx, null, null, "|b|c"); assertEquals("", dt.getSystem().getValueAsString()); assertEquals("b|c", dt.getCode().getValue()); assertEquals("|b\\|c", dt.getValueAsQueryToken(ourCtx)); } @AfterAll public static void afterClassClearContext() { TestUtil.clearAllStaticFieldsForUnitTest(); } }
package de.webalf.slotbot.assembler.api; import de.webalf.slotbot.model.Squad; import de.webalf.slotbot.model.dtos.api.SquadApiDto; import de.webalf.slotbot.model.dtos.api.SquadApiViewDto; import lombok.RequiredArgsConstructor; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import java.util.List; import java.util.stream.Collectors; import java.util.stream.StreamSupport; /** * @author Alf * @since 22.02.2021 */ @Component @RequiredArgsConstructor(onConstructor_ = @Autowired) public class SquadApiAssembler { private final SlotApiAssembler slotApiAssembler; private static SquadApiDto toDto(Squad squad) { return SquadApiDto.builder() .id(squad.getId()) .name(squad.getName()) .slotList(SlotApiAssembler.toDtoList(squad.getSlotList())) .build(); } private SquadApiViewDto toViewDto(Squad squad) { return SquadApiViewDto.builder() .name(squad.getName()) .slotList(slotApiAssembler.toViewDtoList(squad.getSlotList())) .build(); } public static List<SquadApiDto> toDtoList(Iterable<? extends Squad> squadList) { return StreamSupport.stream(squadList.spliterator(), false) .map(SquadApiAssembler::toDto) .collect(Collectors.toList()); } public List<SquadApiViewDto> toViewDtoList(Iterable<? extends Squad> squadList) { return StreamSupport.stream(squadList.spliterator(), false) .map(this::toViewDto) .collect(Collectors.toList()); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.sease.rre.core.domain.metrics;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

/**
 * Utility methods for manipulating Metric implementations.
 *
 * @author Matt Pearce (matt@flax.co.uk)
 */
public abstract class MetricUtils {

    // Map of name values which could be used in a database/search engine
    private static final Map<String, String> SANITISED_NAME_MAP = new HashMap<>();

    static {
        SANITISED_NAME_MAP.put("AP", "ap");
        SANITISED_NAME_MAP.put("F0.5", "f0Point5");
        SANITISED_NAME_MAP.put("F1", "f1");
        SANITISED_NAME_MAP.put("F2", "f2");
        SANITISED_NAME_MAP.put("NDCG@10", "ndcgAt10");
        SANITISED_NAME_MAP.put("P", "p");
        SANITISED_NAME_MAP.put("P@1", "pAt1");
        SANITISED_NAME_MAP.put("P@2", "pAt2");
        SANITISED_NAME_MAP.put("P@3", "pAt3");
        SANITISED_NAME_MAP.put("P@10", "pAt10");
        SANITISED_NAME_MAP.put("R", "r");
        SANITISED_NAME_MAP.put("RR@10", "rrAt10");
    }

    /**
     * Get the sanitised name for a {@link Metric} - eg. one that could be
     * used in a database or search engine field name.
     * <p>
     * Names will be camel-cased, for the most part, with '@' and '.' symbols
     * converted to words.
     *
     * @param m the metric.
     * @return the sanitised version of the metric name.
     */
    public static String sanitiseName(final Metric m) {
        // Read the name once and do a single map lookup (instead of
        // containsKey() followed by get()).
        final String name = m.getName();
        final String mapped = SANITISED_NAME_MAP.get(name);
        if (mapped != null) {
            return mapped;
        }
        // Do some basic sanitisation ourselves. Locale.ROOT keeps the result
        // stable regardless of the JVM default locale (e.g. Turkish dotless-i).
        return name.toLowerCase(Locale.ROOT)
                .replace("@", "At")
                .replace(".", "Point");
    }
}
// Copyright 2008 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.opengse.core; import com.google.opengse.UnwrittenTestCase; /** * Unit test for {@link HttpServer}. */ public class HttpServerTest extends UnwrittenTestCase { }
// NOTE(review): the package segment "annotaion" is a typo for "annotation";
// renaming it would break all existing imports, so it is left as-is here.
package io.agora.meeting.annotaion.message;

import androidx.annotation.IntDef;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

import io.agora.meeting.annotaion.member.Role;

/**
 * Compile-time constrained int "enum" for peer-to-peer command codes.
 * Annotating an int parameter/field with {@code @PeerCmd} restricts its
 * values to {@link #ADMIN} or {@link #NORMAL} (enforced by lint via
 * {@link IntDef}); the annotation itself is discarded after compilation
 * ({@link RetentionPolicy#SOURCE}).
 */
@IntDef({PeerCmd.ADMIN, PeerCmd.NORMAL})
@Retention(RetentionPolicy.SOURCE)
public @interface PeerCmd {
    /**
     * {@link Role#HOST} actions
     */
    int ADMIN = 1;

    /**
     * {@link Role#AUDIENCE} actions
     */
    int NORMAL = 2;
}
package com.atguigu.gulimall.product.vo;

import lombok.Data;

import java.math.BigDecimal;

/**
 * Value object carrying the flash-sale (seckill) configuration of one SKU.
 *
 * @Description TODO
 * @Author 鲁班不会飞
 * @Date 2020/5/11 19:53
 * @Version 1.0
 **/
@Data
public class SecKillInfoVo {

    /**
     * id
     */
    private Long id;
    /**
     * promotion (campaign) id
     */
    private Long promotionId;
    /**
     * promotion session id
     */
    private Long promotionSessionId;
    /**
     * product SKU id
     */
    private Long skuId;
    /**
     * random code identifying this SKU's flash-sale (anti-scripting token)
     */
    private String randomCode;
    /**
     * flash-sale price
     */
    private BigDecimal seckillPrice;
    /**
     * total quantity available in the flash-sale
     */
    private BigDecimal seckillCount;
    /**
     * per-user purchase limit
     */
    private BigDecimal seckillLimit;
    /**
     * sort order
     */
    private Integer seckillSort;

    // Session start/end, presumably epoch milliseconds — TODO confirm against callers.
    private Long startTime;

    private Long endTime;
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.shardingsphere.database.protocol.mysql.packet.command;

import lombok.Getter;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.database.protocol.packet.CommandPacketType;

import java.util.HashMap;
import java.util.Map;

/**
 * Command packet type for MySQL.
 */
@RequiredArgsConstructor
@Getter
public enum MySQLCommandPacketType implements CommandPacketType {
    
    /**
     * COM_SLEEP.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-sleep.html">COM_SLEEP</a>
     */
    COM_SLEEP(0x00),
    
    /**
     * COM_QUIT.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-quit.html">COM_QUIT</a>
     */
    COM_QUIT(0x01),
    
    /**
     * COM_INIT_DB.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-init-db.html">COM_INIT_DB</a>
     */
    COM_INIT_DB(0x02),
    
    /**
     * COM_QUERY.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-sleep.html#packet-COM_QUERY">COM_QUERY</a>
     */
    COM_QUERY(0x03),
    
    /**
     * COM_FIELD_LIST.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-sleep.html#packet-COM_FIELD_LIST">COM_FIELD_LIST</a>
     */
    COM_FIELD_LIST(0x04),
    
    /**
     * COM_CREATE_DB.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-sleep.html#packet-COM_CREATE_DB">COM_CREATE_DB</a>
     */
    COM_CREATE_DB(0x05),
    
    /**
     * COM_DROP_DB.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-create-db.html">COM_DROP_DB</a>
     */
    COM_DROP_DB(0x06),
    
    /**
     * COM_REFRESH.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-refresh.html">COM_REFRESH</a>
     */
    COM_REFRESH(0x07),
    
    /**
     * COM_SHUTDOWN.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-shutdown.html">COM_SHUTDOWN</a>
     */
    COM_SHUTDOWN(0x08),
    
    /**
     * COM_STATISTICS.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-statistics.html#packet-COM_STATISTICS">COM_STATISTICS</a>
     */
    COM_STATISTICS(0x09),
    
    /**
     * COM_PROCESS_INFO.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-process-info.html">COM_PROCESS_INFO</a>
     */
    COM_PROCESS_INFO(0x0a),
    
    /**
     * COM_CONNECT.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-connect.html">COM_CONNECT</a>
     */
    COM_CONNECT(0x0b),
    
    /**
     * COM_PROCESS_KILL.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-process-kill.html">COM_PROCESS_KILL</a>
     */
    COM_PROCESS_KILL(0x0c),
    
    /**
     * COM_DEBUG.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-debug.html">COM_DEBUG</a>
     */
    COM_DEBUG(0x0d),
    
    /**
     * COM_PING.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-ping.html">COM_PING</a>
     */
    COM_PING(0x0e),
    
    /**
     * COM_TIME.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-time.html">COM_TIME</a>
     */
    COM_TIME(0x0f),
    
    /**
     * COM_DELAYED_INSERT.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-time.html">COM_DELAYED_INSERT</a>
     */
    COM_DELAYED_INSERT(0x10),
    
    /**
     * COM_CHANGE_USER.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-sleep.html#packet-COM_CHANGE_USER">COM_CHANGE_USER</a>
     */
    COM_CHANGE_USER(0x11),
    
    /**
     * COM_BINLOG_DUMP.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-change-user.html">COM_BINLOG_DUMP</a>
     */
    COM_BINLOG_DUMP(0x12),
    
    /**
     * COM_TABLE_DUMP.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-table-dump.html">COM_TABLE_DUMP</a>
     */
    COM_TABLE_DUMP(0x13),
    
    /**
     * COM_CONNECT_OUT.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-connect-out.html">COM_CONNECT_OUT</a>
     */
    COM_CONNECT_OUT(0x14),
    
    /**
     * COM_REGISTER_SLAVE.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-register-slave.html">COM_REGISTER_SLAVE</a>
     */
    COM_REGISTER_SLAVE(0x15),
    
    /**
     * COM_STMT_PREPARE.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-stmt-prepare.html">COM_STMT_PREPARE</a>
     */
    COM_STMT_PREPARE(0x16),
    
    /**
     * COM_STMT_EXECUTE.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-stmt-execute.html">COM_STMT_EXECUTE</a>
     */
    COM_STMT_EXECUTE(0x17),
    
    /**
     * COM_STMT_SEND_LONG_DATA.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-stmt-send-long-data.html">COM_STMT_SEND_LONG_DATA</a>
     */
    COM_STMT_SEND_LONG_DATA(0x18),
    
    /**
     * COM_STMT_CLOSE.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-stmt-close.html">COM_STMT_CLOSE</a>
     */
    COM_STMT_CLOSE(0x19),
    
    /**
     * COM_STMT_RESET.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-stmt-reset.html">COM_STMT_RESET</a>
     */
    COM_STMT_RESET(0x1a),
    
    /**
     * COM_SET_OPTION.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-set-option.html">COM_SET_OPTION</a>
     */
    COM_SET_OPTION(0x1b),
    
    /**
     * COM_STMT_FETCH.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-stmt-fetch.html">COM_STMT_FETCH</a>
     */
    COM_STMT_FETCH(0x1c),
    
    /**
     * COM_DAEMON.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-daemon.html">COM_DAEMON</a>
     */
    COM_DAEMON(0x1d),
    
    /**
     * COM_BINLOG_DUMP_GTID.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-binlog-dump-gtid.html">COM_BINLOG_DUMP_GTID</a>
     */
    COM_BINLOG_DUMP_GTID(0x1e),
    
    /**
     * COM_RESET_CONNECTION.
     *
     * @see <a href="https://dev.mysql.com/doc/internals/en/com-reset-connection.html">COM_RESET_CONNECTION</a>
     */
    COM_RESET_CONNECTION(0x1f);
    
    // Plain map filled in a static initializer instead of the previous
    // double-brace (anonymous HashMap subclass) idiom, which creates an extra
    // class and pins an enclosing reference for no benefit. Enum static
    // initializers run after all constants are constructed, so values() is
    // complete here.
    private static final Map<Integer, MySQLCommandPacketType> MYSQL_COMMAND_PACKET_TYPE_CACHE = new HashMap<>(64);
    
    static {
        for (MySQLCommandPacketType each : values()) {
            MYSQL_COMMAND_PACKET_TYPE_CACHE.put(each.value, each);
        }
    }
    
    private final int value;
    
    /**
     * Get the command packet type that corresponds to the given protocol value.
     *
     * @param value integer value
     * @return command packet type enum
     * @throws IllegalArgumentException if no packet type has the given value
     */
    public static MySQLCommandPacketType valueOf(final int value) {
        MySQLCommandPacketType result = MYSQL_COMMAND_PACKET_TYPE_CACHE.get(value);
        if (null == result) {
            throw new IllegalArgumentException(String.format("Cannot find '%s' in command packet type", value));
        }
        return result;
    }
}
/**
 * Copyright (C) 2017 Bruno Candido Volpato da Cunha (brunocvcunha@gmail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// This is a Maven-archetype template: ${package} is substituted with the
// generated project's package at archetype-generation time, so the file is
// not valid Java until generated.
package ${package};

import org.junit.Test;
import static org.junit.Assert.*;

/**
 * Placeholder test class produced by the archetype; projects generated from
 * this template are expected to replace it with real tests.
 */
public class AppTest {

    //TODO: build tests!

    /** Trivial smoke test so the generated project has a passing build. */
    @Test
    public void testSimple() {
        assertTrue(true);
    }
}
package FixCollection;

import java.io.*;
import java.util.*;

/**
 * InputFolder:Patches OutputFolder:PatchesAfterFilter1
 *
 * FilterOne consist of: 1. remove total diffs to not MainSourceFiles 2. remove
 * total diffs that are adding or deleting files 3. remove total diffs that
 * contain no change 4. delete NullPatch after the above three steps
 *
 * Fixes over the original version: reader/writer are closed via
 * try-with-resources (the BufferedReader was previously never closed and the
 * BufferedWriter leaked on exception), the source-file suffix check no longer
 * throws StringIndexOutOfBoundsException on diff headers without a '.', and
 * boxed Boolean flags were replaced with primitives.
 */
public class PatchFilterOne {

	// Suffixes that identify "main source files" for the current project.
	static ArrayList<String> mainSourceFileSuffixList = new ArrayList<String>();
	// Line prefix that starts a new per-file diff in the patch format used by the project.
	static String diffStarter = null;
	private String lineBreak = System.getProperty("line.separator");
	private String pathSep = File.separator;

	/**
	 * Configures the suffix list / diff marker for the given project and then
	 * runs the first filter pass.
	 *
	 * @param projectName one of "EclipseJDTCore", "MozillaFirfox", "LibreOffice"
	 * @param patchFolderPath folder containing raw patch files
	 * @param patchAfterFilter1FolderPath output folder; note the original code
	 *        concatenates this path with file names without inserting a
	 *        separator, so callers are expected to pass a trailing separator
	 */
	public void performFirstFilter(String projectName, String patchFolderPath, String patchAfterFilter1FolderPath) {
		mainSourceFileSuffixList.clear();
		if (projectName.equals("EclipseJDTCore")) { // .java (Java)
			mainSourceFileSuffixList.add(".java");
			diffStarter = "diff --git";
		} else if (projectName.equals("MozillaFirfox")) { // .c, .cpp, .cxx, .cc (C and C++)
			mainSourceFileSuffixList.add(".c");
			mainSourceFileSuffixList.add(".cpp");
			mainSourceFileSuffixList.add(".cxx");
			mainSourceFileSuffixList.add(".cc");
			diffStarter = "diff -r";
		} else if (projectName.equals("LibreOffice")) { // .cpp, .cxx, .cc (C++)
			mainSourceFileSuffixList.add(".cpp");
			mainSourceFileSuffixList.add(".cxx");
			mainSourceFileSuffixList.add(".cc");
			diffStarter = "diff --git";
		}

		removeNotJavaFileDiff(patchFolderPath, patchAfterFilter1FolderPath);
		deleteNullPatchFile(patchAfterFilter1FolderPath);
	}

	/**
	 * Copies each patch file into the output folder, keeping only the per-file
	 * diffs accepted by {@link #processSingleDiff(List, BufferedWriter)}.
	 */
	private void removeNotJavaFileDiff(String patchFolderPath, String patchAfterFilter1FolderPath) {
		// create patchAfterFilter1Folder
		File patchAfterFilter1Dir = new File(patchAfterFilter1FolderPath);
		if (!patchAfterFilter1Dir.exists()) {
			patchAfterFilter1Dir.mkdir();
		}

		File dir = new File(patchFolderPath);
		File[] patchFiles = dir.listFiles();
		if (patchFiles == null) {
			System.err.println("No Patch Files Found!");
			return;
		}

		for (File patchFile : patchFiles) {
			String patchFilePath = patchFile.getAbsolutePath();
			if (patchFilePath.endsWith(".DS_Store")) {
				continue;
			}
			String patchFileName = patchFilePath.substring(patchFilePath.lastIndexOf(pathSep) + 1);
			File fileFilteredPatch = new File(patchAfterFilter1FolderPath + patchFileName);
			// try-with-resources guarantees both streams are closed even when
			// an IOException is thrown mid-copy (the original leaked them).
			try (BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(patchFilePath)));
					BufferedWriter bw = new BufferedWriter(new FileWriter(fileFilteredPatch))) {
				List<String> oneDiffContent = new ArrayList<String>();
				String readStr;
				while ((readStr = br.readLine()) != null) {
					if (readStr.startsWith(diffStarter) && !oneDiffContent.isEmpty()) {
						// process the content of the previous diff
						processSingleDiff(oneDiffContent, bw);
						oneDiffContent.clear();
					}
					oneDiffContent.add(readStr);
				}
				// process the last diff
				if (!oneDiffContent.isEmpty()) {
					processSingleDiff(oneDiffContent, bw);
				}
				bw.flush();
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Writes the diff to {@code bw} if and only if it (a) touches a main source
	 * file, (b) is not a pure file addition/deletion, and (c) contains at least
	 * one real modified line.
	 */
	private void processSingleDiff(List<String> diffContent, BufferedWriter bw) {
		boolean isMainSourceFileDiff = false;
		boolean isAddDeleteFile = false;
		boolean containsModify = false;

		// check the changed file's suffix. endsWith() is equivalent to the old
		// substring(lastIndexOf('.')) comparison but cannot throw when the
		// header contains no dot at all (e.g. "diff --git a/Makefile b/Makefile").
		String diffHeader = diffContent.get(0);
		for (String mainSourceFileSuffix : mainSourceFileSuffixList) {
			if (diffHeader.endsWith(mainSourceFileSuffix)) {
				isMainSourceFileDiff = true;
				break;
			}
		}

		// check whether the change only contains adding or deleting a file,
		// and whether the change contains a real modification
		for (String line : diffContent) {
			if (line.contains("/dev/null") || line.contains("new file mode") || line.contains("deleted file mode")) {
				isAddDeleteFile = true;
			}
			if ((line.startsWith("+") || line.startsWith("-"))
					&& !(line.startsWith("+++") || line.startsWith("---"))) {
				containsModify = true;
			}
		}

		// if the change is made to main source files, and not adding or
		// deleting a file, and contains modification, it will be reserved.
		if (isMainSourceFileDiff && !isAddDeleteFile && containsModify) {
			try {
				for (String line : diffContent) {
					bw.write(line + lineBreak);
				}
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	/** Deletes zero-length output files left over after filtering. */
	private void deleteNullPatchFile(String patchFolderAfterFilter1) {
		File[] fileList = new File(patchFolderAfterFilter1).listFiles();
		if (fileList == null) {
			return;
		}
		for (File tmp : fileList) {
			if (tmp.length() == 0) {
				tmp.delete();
			}
		}
	}
}
// automatically generated, do not modify
// FlatBuffers accessor for one logcat record; vtable offsets (4..16) must
// stay in sync with the .fbs schema that generated this class.
package com.st.logger.core;

import java.nio.*;
import java.lang.*;
import com.google.flatbuffers.*;

public class logcatInfo extends Table {
  /** Wraps the root table of a finished FlatBuffer contained in {@code _bb}. */
  public static logcatInfo getRootAslogcatInfo(ByteBuffer _bb) { _bb.order(ByteOrder.LITTLE_ENDIAN); return (new logcatInfo()).__init(_bb.getInt(_bb.position()) + _bb.position(), _bb); }
  /** Points this accessor at table position {@code _i} inside buffer {@code _bb}. */
  public logcatInfo __init(int _i, ByteBuffer _bb) { bb_pos = _i; bb = _bb; return this; }

  /// priority
  public byte priority() { int o = __offset(4); return o != 0 ? bb.get(o + bb_pos) : 0; }
  /// seconds since Epoch
  public long seconds() { int o = __offset(6); return o != 0 ? bb.getLong(o + bb_pos) : 0; }
  /// nanoseconds
  public long nanoSeconds() { int o = __offset(8); return o != 0 ? bb.getLong(o + bb_pos) : 0; }
  /// generating process's pid
  public int pid() { int o = __offset(10); return o != 0 ? bb.getInt(o + bb_pos) : 0; }
  /// generating process's tid
  public int tid() { int o = __offset(12); return o != 0 ? bb.getInt(o + bb_pos) : 0; }
  /// Tag
  public String tag() { int o = __offset(14); return o != 0 ? __string(o + bb_pos) : null; }
  public ByteBuffer tagAsByteBuffer() { return __vector_as_bytebuffer(14, 1); }
  /// message
  public String message() { int o = __offset(16); return o != 0 ? __string(o + bb_pos) : null; }
  public ByteBuffer messageAsByteBuffer() { return __vector_as_bytebuffer(16, 1); }

  /**
   * Builds one complete logcatInfo table and returns its offset. Fields are
   * added largest-size-first (longs, then offsets/ints, then the byte), the
   * usual layout order in generated FlatBuffers builders.
   */
  public static int createlogcatInfo(FlatBufferBuilder builder, byte priority, long seconds, long nanoSeconds, int pid, int tid, int tag, int message) { builder.startObject(7); logcatInfo.addNanoSeconds(builder, nanoSeconds); logcatInfo.addSeconds(builder, seconds); logcatInfo.addMessage(builder, message); logcatInfo.addTag(builder, tag); logcatInfo.addTid(builder, tid); logcatInfo.addPid(builder, pid); logcatInfo.addPriority(builder, priority); return logcatInfo.endlogcatInfo(builder); }
  /** Starts a table with 7 fields for manual field-by-field construction. */
  public static void startlogcatInfo(FlatBufferBuilder builder) { builder.startObject(7); }
  public static void addPriority(FlatBufferBuilder builder, byte priority) { builder.addByte(0, priority, 0); }
  public static void addSeconds(FlatBufferBuilder builder, long seconds) { builder.addLong(1, seconds, 0); }
  public static void addNanoSeconds(FlatBufferBuilder builder, long nanoSeconds) { builder.addLong(2, nanoSeconds, 0); }
  public static void addPid(FlatBufferBuilder builder, int pid) { builder.addInt(3, pid, 0); }
  public static void addTid(FlatBufferBuilder builder, int tid) { builder.addInt(4, tid, 0); }
  public static void addTag(FlatBufferBuilder builder, int tagOffset) { builder.addOffset(5, tagOffset, 0); }
  public static void addMessage(FlatBufferBuilder builder, int messageOffset) { builder.addOffset(6, messageOffset, 0); }
  /** Finishes the table started by startlogcatInfo/createlogcatInfo. */
  public static int endlogcatInfo(FlatBufferBuilder builder) { int o = builder.endObject(); return o; }
};
/*-------------------------------------------------------------------------+
|                                                                          |
| Copyright 2005-2011 The ConQAT Project                                   |
|                                                                          |
| Licensed under the Apache License, Version 2.0 (the "License");          |
| you may not use this file except in compliance with the License.         |
| You may obtain a copy of the License at                                  |
|                                                                          |
|    http://www.apache.org/licenses/LICENSE-2.0                            |
|                                                                          |
| Unless required by applicable law or agreed to in writing, software      |
| distributed under the License is distributed on an "AS IS" BASIS,        |
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| See the License for the specific language governing permissions and      |
| limitations under the License.                                           |
+-------------------------------------------------------------------------*/
package org.conqat.engine.core.bundle;

import java.io.File;
import java.io.IOException;

import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;

/**
 * This task reads a bundle descriptor and stores the following information as
 * ANT properties.
 *
 * <ul>
 * <li>Property {@value #PROPERTY_BUNDLE_ID} -> Bundle id</li>
 * <li>Property {@value #PROPERTY_BUNDLE_VERSION} -> Bundle version</li>
 * </ul>
 *
 * @author Florian Deissenboeck
 * @author $Author: kinnen $
 * @version $Rev: 41751 $
 * @ConQAT.Rating GREEN Hash: 656B672268CD5BC513B7ECE0AC8DB583
 */
public class BundleInfoTask extends Task {

	/** Constant for bundle id property. */
	public static final String PROPERTY_BUNDLE_ID = "bundle.id";

	/** Constant for bundle version property. */
	public static final String PROPERTY_BUNDLE_VERSION = "bundle.version";

	/** Path to the bundle.xml */
	private File bundleLocation;

	/** Set bundle path. */
	public void setBundlePath(String bundlePath) {
		bundleLocation = new File(bundlePath);
	}

	/**
	 * Execute task. This reads the bundle descriptor and sets the properties.
	 *
	 * @throws BuildException
	 *             if the bundle path is undefined or loading the bundle
	 *             descriptor failed.
	 */
	@Override
	public void execute() throws BuildException {
		if (bundleLocation == null) {
			throw new BuildException("Bundle path undefined!");
		}

		BundleInfo bundleInfo = readBundleInfo();
		getProject().setProperty(PROPERTY_BUNDLE_ID, bundleInfo.getId());
		getProject().setProperty(PROPERTY_BUNDLE_VERSION,
				bundleInfo.getVersion().toString());
	}

	/**
	 * Load bundle descriptor from {@link #bundleLocation}.
	 *
	 * @throws BuildException
	 *             if the location is not a directory, contains no descriptor,
	 *             or reading the descriptor failed.
	 */
	private BundleInfo readBundleInfo() throws BuildException {
		try {
			// Canonicalize so relative paths and symlinks are resolved before
			// validation and error reporting.
			bundleLocation = bundleLocation.getCanonicalFile();
		} catch (IOException e) {
			throw new BuildException("Error creating canonical file: "
					+ e.getMessage(), e);
		}

		if (!bundleLocation.isDirectory()) {
			throw new BuildException("Bundle location must be a directory!");
		}

		try {
			BundleInfo bundleInfo = new BundleInfo(bundleLocation);
			File descriptor = bundleInfo.getDescriptor();
			if (!descriptor.isFile()) {
				throw new BuildException(
						"Location does not contain a bundle descriptor.");
			}
			BundleDescriptorReader reader = new BundleDescriptorReader(
					descriptor);
			reader.read(bundleInfo);
			return bundleInfo;
		} catch (BundleException e) {
			throw new BuildException(e.getMessage(), e);
		} catch (IOException e) {
			// An IOException here means the descriptor could not be read, not
			// necessarily that a file is missing; the original message
			// ("File not found: ") was misleading.
			throw new BuildException("Error reading bundle descriptor: "
					+ e.getMessage(), e);
		}
	}
}
package com.antgroup.zmxy.openplatform.api.request;

import java.util.Map;

import com.antgroup.zmxy.openplatform.api.ZhimaRequest;
import com.antgroup.zmxy.openplatform.api.internal.util.ZhimaHashMap;
import com.antgroup.zmxy.openplatform.api.response.ZhimaMerchantOrderConfirmResponse;

/**
 * ALIPAY API: zhima.merchant.order.confirm request
 *
 * @author auto create
 * @since 1.0, 2017-02-17 12:22:35
 */
public class ZhimaMerchantOrderConfirmRequest implements ZhimaRequest<ZhimaMerchantOrderConfirmResponse> {

	private ZhimaHashMap udfParams; // add user-defined text parameters
	private String apiVersion="1.0";

	/**
	 * The order number to confirm.
	 */
	private String orderNo;

	/**
	 * transaction_id uniquely identifies one request and is the key field for
	 * reconciliation. For queries that reuse the same transaction_id, Zhima
	 * returns the data of the first query within one day (86400 seconds);
	 * queries after that window are invalid and return an error, and repeated
	 * queries inside the window are not billed again. Recommended format:
	 * 30 characters, a 17-character millisecond timestamp
	 * (yyyyMMddHHmmssSSS) followed by a 13-digit incrementing number.
	 */
	private String transactionId;

	public void setOrderNo(String orderNo) {
		this.orderNo = orderNo;
	}
	public String getOrderNo() {
		return this.orderNo;
	}

	public void setTransactionId(String transactionId) {
		this.transactionId = transactionId;
	}
	public String getTransactionId() {
		return this.transactionId;
	}

	// Common request attributes shared by all Zhima requests.
	private String channel;
	private String platform;
	private String scene;
	private String extParams;

	public String getApiVersion() {
		return this.apiVersion;
	}

	public void setApiVersion(String apiVersion) {
		this.apiVersion = apiVersion;
	}

	public void setChannel(String channel){
		this.channel=channel;
	}

	public String getChannel(){
		return this.channel;
	}

	public void setPlatform(String platform){
		this.platform=platform;
	}

	public String getPlatform(){
		return this.platform;
	}

	public void setScene(String scene){
		this.scene=scene;
	}

	public String getScene(){
		return this.scene;
	}

	public void setExtParams(String extParams){
		this.extParams=extParams;
	}

	public String getExtParams(){
		return this.extParams;
	}

	/** API method name used to route the request on the gateway. */
	public String getApiMethodName() {
		return "zhima.merchant.order.confirm";
	}

	/**
	 * Serializes the business parameters of this request.
	 * NOTE(review): channel/platform/scene/extParams are intentionally not
	 * added here — presumably they are read via their getters by the
	 * transport layer; verify against the client code.
	 */
	public Map<String, String> getTextParams() {
		ZhimaHashMap txtParams = new ZhimaHashMap();
		txtParams.put("order_no", this.orderNo);
		txtParams.put("transaction_id", this.transactionId);
		if(udfParams != null) {
			txtParams.putAll(this.udfParams);
		}
		return txtParams;
	}

	/** Adds one user-defined text parameter, creating the map lazily. */
	public void putOtherTextParam(String key, String value) {
		if(this.udfParams == null) {
			this.udfParams = new ZhimaHashMap();
		}
		this.udfParams.put(key, value);
	}

	public Class<ZhimaMerchantOrderConfirmResponse> getResponseClass() {
		return ZhimaMerchantOrderConfirmResponse.class;
	}
}
/*
 * Copyright 2016 Juan Manuel Fernandez
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.juanmf.java2plant.render.filters;

import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * A filter that lets a class through when its fully qualified name matches at
 * least one of the registered regex patterns.
 *
 * @param <C> the item type that will be filtered
 *
 * @author juanmf@gmail.com
 */
public class AllowedRexegFilter<C extends Class<?>> extends NotifyingFilter<C> {

    /** Patterns a class name may match to be accepted. */
    protected Set<Pattern> allowedPatterns = new HashSet<>();

    public AllowedRexegFilter() {
        super();
    }

    public AllowedRexegFilter(NotifierOnFiltering<C> notifier) {
        super(notifier);
    }

    /** Registers one more pattern that items are allowed to match. */
    public void addAllowedItem(Pattern pattern) {
        allowedPatterns.add(pattern);
    }

    /** Unregisters a pattern; returns {@code true} if it was present. */
    public boolean removeAllowedItem(Pattern pattern) {
        return allowedPatterns.remove(pattern);
    }

    /** Accepts {@code item} iff any registered pattern matches its name. */
    @Override
    protected boolean doSatisfy(C item) {
        final String className = item.getName();
        return allowedPatterns.stream()
                .anyMatch(pattern -> pattern.matcher(className).matches());
    }
}
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * Copyright 2020-2022 The JReleaser authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jreleaser.sdk.twitter;

import org.jreleaser.util.JReleaserLogger;

import static java.util.Objects.requireNonNull;
import static org.jreleaser.util.StringUtils.requireNonBlank;

/**
 * Base class for Twitter commands: owns the shared {@link Twitter} client and
 * the dry-run flag, and provides a fluent {@code Builder} for subclasses.
 *
 * @author Andres Almiray
 * @since 0.1.0
 */
abstract class AbstractTwitterCommand implements TwitterCommand {
    // When true, the command should not perform real API calls.
    protected final boolean dryrun;
    protected final Twitter twitter;

    /**
     * Creates the underlying Twitter client from the given credentials and
     * connection settings; {@code dryrun} is both stored here and forwarded
     * to the client.
     */
    protected AbstractTwitterCommand(JReleaserLogger logger, String apiHost, int connectTimeout, int readTimeout, String consumerKey, String consumerToken,
                                     String accessToken, String accessTokenSecret, boolean dryrun) {
        this.twitter = new Twitter(logger, apiHost, connectTimeout, readTimeout, consumerKey, consumerToken, accessToken, accessTokenSecret, dryrun);
        this.dryrun = dryrun;
    }

    /**
     * Self-typed fluent builder for subclasses of AbstractTwitterCommand.
     * All string setters trim their input and reject blank values up front;
     * {@link #validate()} re-checks everything before the command is built.
     *
     * @param <S> the concrete builder type, enabling chained calls to return
     *            the subclass builder
     */
    static class Builder<S extends Builder<S>> {
        protected final JReleaserLogger logger;
        protected boolean dryrun;
        protected String consumerKey;
        protected String consumerToken;
        protected String accessToken;
        protected String accessTokenSecret;
        // Defaults: Twitter REST API v1.1 endpoint, 20s connect / 60s read.
        protected String apiHost = "https://api.twitter.com/1.1/";
        protected int connectTimeout = 20;
        protected int readTimeout = 60;

        protected Builder(JReleaserLogger logger) {
            this.logger = requireNonNull(logger, "'logger' must not be null");
        }

        // Cast is safe by the self-type convention: concrete subclasses
        // instantiate Builder with their own type as S.
        @SuppressWarnings("unchecked")
        protected final S self() {
            return (S) this;
        }

        public S dryrun(boolean dryrun) {
            this.dryrun = dryrun;
            return self();
        }

        public S consumerKey(String consumerKey) {
            this.consumerKey = requireNonBlank(consumerKey, "'consumerKey' must not be blank").trim();
            return self();
        }

        public S consumerToken(String consumerToken) {
            this.consumerToken = requireNonBlank(consumerToken, "'consumerToken' must not be blank").trim();
            return self();
        }

        public S accessToken(String accessToken) {
            this.accessToken = requireNonBlank(accessToken, "'accessToken' must not be blank").trim();
            return self();
        }

        public S accessTokenSecret(String accessTokenSecret) {
            this.accessTokenSecret = requireNonBlank(accessTokenSecret, "'accessTokenSecret' must not be blank").trim();
            return self();
        }

        public S apiHost(String apiHost) {
            this.apiHost = requireNonBlank(apiHost, "'apiHost' must not be blank").trim();
            return self();
        }

        public S connectTimeout(int connectTimeout) {
            this.connectTimeout = connectTimeout;
            return self();
        }

        public S readTimeout(int readTimeout) {
            this.readTimeout = readTimeout;
            return self();
        }

        /** Verifies all required credentials and the API host are set. */
        protected void validate() {
            requireNonBlank(apiHost, "'apiHost' must not be blank");
            requireNonBlank(consumerKey, "'consumerKey' must not be blank");
            requireNonBlank(consumerToken, "'consumerToken' must not be blank");
            requireNonBlank(accessToken, "'accessToken' must not be blank");
            requireNonBlank(accessTokenSecret, "'accessTokenSecret' must not be blank");
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.ml.composition.boosting;

import org.apache.ignite.ml.composition.boosting.loss.SquaredError;
import org.apache.ignite.ml.dataset.DatasetBuilder;
import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
import org.apache.ignite.ml.environment.LearningEnvironmentBuilder;

import java.io.Serializable;

/**
 * Trainer for regressor using Gradient Boosting. This algorithm uses gradient of Mean squared error loss metric [MSE]
 * in each step of learning.
 */
public abstract class GDBRegressionTrainer extends GDBTrainer {
    /**
     * Constructs instance of GDBRegressionTrainer.
     *
     * @param gradStepSize Grad step size.
     * @param cntOfIterations Count of learning iterations.
     */
    public GDBRegressionTrainer(double gradStepSize, Integer cntOfIterations) {
        // Regression fixes the loss to squared error (MSE gradient).
        super(gradStepSize, cntOfIterations, new SquaredError());
    }

    /** {@inheritDoc} */
    @Override protected <V, K, C extends Serializable> boolean learnLabels(DatasetBuilder<K, V> builder,
        Vectorizer<K, V, C, Double> vectorizer) {
        // Always reports success: regression uses raw labels directly, so
        // there is no label mapping to learn (see the identity conversions
        // below).
        return true;
    }

    /** {@inheritDoc} */
    @Override protected double externalLabelToInternal(double x) {
        // Identity: external and internal label spaces coincide for regression.
        return x;
    }

    /** {@inheritDoc} */
    @Override protected double internalLabelToExternal(double x) {
        // Identity: inverse of externalLabelToInternal.
        return x;
    }

    /** {@inheritDoc} */
    @Override public GDBRegressionTrainer withEnvironmentBuilder(LearningEnvironmentBuilder envBuilder) {
        // Covariant cast keeps the fluent API returning the concrete type.
        return (GDBRegressionTrainer)super.withEnvironmentBuilder(envBuilder);
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.redshift.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * <p>
 * The result of the <code>DeleteSnapshotCopyGrant</code> action.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/DeleteSnapshotCopyGrant" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteSnapshotCopyGrantRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the snapshot copy grant to delete. */
    private String snapshotCopyGrantName;

    /**
     * Sets the name of the snapshot copy grant to delete.
     *
     * @param snapshotCopyGrantName
     *        the grant name
     */
    public void setSnapshotCopyGrantName(String snapshotCopyGrantName) {
        this.snapshotCopyGrantName = snapshotCopyGrantName;
    }

    /**
     * Returns the name of the snapshot copy grant to delete.
     *
     * @return the grant name
     */
    public String getSnapshotCopyGrantName() {
        return this.snapshotCopyGrantName;
    }

    /**
     * Fluent variant of {@link #setSnapshotCopyGrantName(String)}.
     *
     * @param snapshotCopyGrantName
     *        the grant name
     * @return {@code this}, so calls can be chained
     */
    public DeleteSnapshotCopyGrantRequest withSnapshotCopyGrantName(String snapshotCopyGrantName) {
        setSnapshotCopyGrantName(snapshotCopyGrantName);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and
     * debugging. Sensitive data will be redacted from this string using a
     * placeholder value.
     *
     * @return a string representation of this object
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getSnapshotCopyGrantName() != null) {
            sb.append("SnapshotCopyGrantName: ").append(getSnapshotCopyGrantName());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DeleteSnapshotCopyGrantRequest)) {
            return false;
        }
        DeleteSnapshotCopyGrantRequest other = (DeleteSnapshotCopyGrantRequest) obj;
        String mine = this.getSnapshotCopyGrantName();
        String theirs = other.getSnapshotCopyGrantName();
        // Equal when both names are null, or both non-null and equal.
        return mine == null ? theirs == null : mine.equals(theirs);
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        String name = getSnapshotCopyGrantName();
        return prime * 1 + (name == null ? 0 : name.hashCode());
    }

    @Override
    public DeleteSnapshotCopyGrantRequest clone() {
        return (DeleteSnapshotCopyGrantRequest) super.clone();
    }
}
/* * Copyright 2018 JDCLOUD.COM * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http:#www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * * * * * Contact: * * NOTE: This class is auto generated by the jdcloud code generator program. */ package com.jdcloud.sdk.service.disk.model; import java.util.List; import java.util.ArrayList; /** * 联系人信息。请求参数中,sms和email中至少一个为 &#x60;发送&#x60; */ public class ContactInfo implements java.io.Serializable { private static final long serialVersionUID = 1L; /** * 是否发送短信。0:不发送 1:发送 */ private Integer sms; /** * 是否发送短信。0:不发送 1:发送 */ private Integer email; /** * 联系人id,默认为空 */ private List<Integer> personIds; /** * 联系组id,默认为空 */ private List<Integer> groupIds; /** * get 是否发送短信。0:不发送 1:发送 * * @return */ public Integer getSms() { return sms; } /** * set 是否发送短信。0:不发送 1:发送 * * @param sms */ public void setSms(Integer sms) { this.sms = sms; } /** * get 是否发送短信。0:不发送 1:发送 * * @return */ public Integer getEmail() { return email; } /** * set 是否发送短信。0:不发送 1:发送 * * @param email */ public void setEmail(Integer email) { this.email = email; } /** * get 联系人id,默认为空 * * @return */ public List<Integer> getPersonIds() { return personIds; } /** * set 联系人id,默认为空 * * @param personIds */ public void setPersonIds(List<Integer> personIds) { this.personIds = personIds; } /** * get 联系组id,默认为空 * * @return */ public List<Integer> getGroupIds() { return groupIds; } /** * set 联系组id,默认为空 * * @param groupIds */ public void setGroupIds(List<Integer> groupIds) { this.groupIds = groupIds; } /** * set 是否发送短信。0:不发送 1:发送 * * @param sms */ public 
ContactInfo sms(Integer sms) { this.sms = sms; return this; } /** * set 是否发送短信。0:不发送 1:发送 * * @param email */ public ContactInfo email(Integer email) { this.email = email; return this; } /** * set 联系人id,默认为空 * * @param personIds */ public ContactInfo personIds(List<Integer> personIds) { this.personIds = personIds; return this; } /** * set 联系组id,默认为空 * * @param groupIds */ public ContactInfo groupIds(List<Integer> groupIds) { this.groupIds = groupIds; return this; } /** * add item to 联系人id,默认为空 * * @param personId */ public void addPersonId(Integer personId) { if (this.personIds == null) { this.personIds = new ArrayList<>(); } this.personIds.add(personId); } /** * add item to 联系组id,默认为空 * * @param groupId */ public void addGroupId(Integer groupId) { if (this.groupIds == null) { this.groupIds = new ArrayList<>(); } this.groupIds.add(groupId); } }
package com.sforce.soap.partner;

/**
 * Generated by ComplexTypeCodeGenerator.java. Please do not edit.
 *
 * Accessor interface for the related-content items attached to a describe
 * result.
 */
public interface IRelatedContent {

    /**
     * element : relatedContentItems of type {urn:partner.soap.sforce.com}DescribeRelatedContentItem
     * java type: com.sforce.soap.partner.DescribeRelatedContentItem[]
     */
    public com.sforce.soap.partner.IDescribeRelatedContentItem[] getRelatedContentItems();

    public void setRelatedContentItems(com.sforce.soap.partner.IDescribeRelatedContentItem[] relatedContentItems);
}
package com.ext.nacid.regprof.web.handlers.impl;

import javax.servlet.ServletContext;

/**
 * Registration-profile logout handler. Adds no behavior of its own: it only
 * forwards the servlet context to the shared base
 * {@link com.ext.nacid.web.handlers.impl.LogoutHandler}.
 */
public class LogoutHandler extends com.ext.nacid.web.handlers.impl.LogoutHandler {

    public LogoutHandler(ServletContext servletContext) {
        super(servletContext);
    }
}
/*
 * Copyright 2003-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jetbrains.mps.nodeEditor;

import com.intellij.openapi.ui.JBPopupMenu;
import com.intellij.openapi.ui.Messages;
import com.intellij.util.ui.JBUI.Borders;
import jetbrains.mps.errors.item.NodeReportItem;
import jetbrains.mps.errors.item.RuleIdFlavouredItem;
import jetbrains.mps.errors.item.RuleIdFlavouredItem.TypesystemRuleId;
import jetbrains.mps.ide.ThreadUtils;
import jetbrains.mps.openapi.navigation.EditorNavigator;
import jetbrains.mps.project.MPSProject;
import jetbrains.mps.project.Project;
import org.jetbrains.mps.openapi.model.SNodeReference;

import javax.swing.AbstractAction;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JTextField;
import javax.swing.border.EmptyBorder;
import java.awt.BorderLayout;
import java.awt.Dialog;
import java.awt.Frame;
import java.awt.GridLayout;
import java.awt.HeadlessException;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Modal dialog that shows a single editor error message plus a row of
 * buttons (OK and, optionally, "Go To Rule" actions). Buttons may be added
 * after construction; {@link #initializeUI()} must be called again before
 * the dialog is made visible (enforced by the assert in
 * {@link #setVisible(boolean)}).
 */
public class MPSErrorDialog extends JDialog {
  // Minimum horizontal padding, in pixels, on each side of the text/buttons.
  private static final int MIN_SIDE_PADDING = 30;
  private List<JButton> myButtons = new ArrayList<>();
  // Tracks whether initializeUI() has run since the last addButton() call.
  private boolean myIsInitialized = false;
  private JTextField myField;
  private final Window myOwner;
  private String myErrorString;
  // Closes the dialog on Escape pressed inside the text field.
  private KeyListener myEscapeListener = new KeyAdapter() {
    @Override
    public void keyPressed(KeyEvent e) {
      if (e.getKeyCode() == KeyEvent.VK_ESCAPE) {
        dispose();
        e.consume();
      }
    }
  };

  public MPSErrorDialog(Frame frame, String text, String title) {
    this(frame, text, title, true);
  }

  /**
   * @param initializeUI when false, the caller is expected to add extra
   *                     buttons, then call {@link #initializeUI()} and
   *                     {@link #setVisible(boolean)} itself
   */
  public MPSErrorDialog(Window window, String text, String title, boolean initializeUI) throws HeadlessException {
    super(window, title, Dialog.DEFAULT_MODALITY_TYPE);
    myOwner = window;
    init(text);
    if (initializeUI) {
      initializeUI();
      setVisible(true);
    }
  }

  /**
   * Shows a dialog for a highlighter message; when the report item carries
   * typesystem rule ids, a "Go To Rule" button is added. Runs on the UI
   * thread without waiting.
   */
  static void showCellErrorDialog(Project project, Window window, HighlighterMessage message) {
    if (message == null || message.getReportItem() == null) {
      return;
    }
    final NodeReportItem herror = message.getReportItem();
    ThreadUtils.runInUIThreadNoWait(() -> {
      String msg = message.getMessage();
      // assuming no html wrapping going below
      final MPSErrorDialog dialog = new MPSErrorDialog(window, msg, message.getStatus().getPresentation(), false);
      List<TypesystemRuleId> ruleIds = new ArrayList<>(RuleIdFlavouredItem.FLAVOUR_RULE_ID.getCollection(herror));
      if (!ruleIds.isEmpty()) {
        final JButton button = new JButton();
        AbstractAction action = new GoToRuleAction("Go To Rule", ruleIds, dialog, button, project);
        button.setAction(action);
        dialog.addButton(button);
      }
      dialog.initializeUI();
      dialog.setVisible(true);
    });
  }

  /** Builds the non-layout parts: text field, Escape handling, OK button. */
  private void init(String error) {
    myErrorString = error;
    setLayout(new BorderLayout());
    myField = new JTextField(error);
    myField.setEditable(false);
    myField.addKeyListener(myEscapeListener);
    JButton button = new JButton(new AbstractAction("OK") {
      @Override
      public void actionPerformed(ActionEvent e) {
        dispose();
      }
    });
    getRootPane().setDefaultButton(button);
    myButtons.add(button);
  }

  /**
   * Lays out the text and button row, sizing paddings so the dialog is wide
   * enough for both, then packs and centers the dialog over its owner.
   * Must be re-invoked after every {@link #addButton(JButton)}.
   */
  public void initializeUI() {
    int textWidth = myField.getFontMetrics(myField.getFont()).stringWidth(myErrorString);
    JPanel panel = new JPanel(new GridLayout(1, myButtons.size()));
    for (JButton jButton : myButtons) {
      panel.add(jButton);
    }
    panel.doLayout();
    int buttonsWidth = (int) panel.getPreferredSize().getWidth();
    // Make the panel wide enough for whichever is wider: buttons or text.
    int minPanelWidth = Math.max(2 * MIN_SIDE_PADDING + buttonsWidth, 2 * MIN_SIDE_PADDING + textWidth);
    int calculatedButtonsPadding = (minPanelWidth - buttonsWidth) / 2;
    int calculatedTextPadding = (minPanelWidth - textWidth) / 2;
    panel.setBorder(Borders.empty(5, calculatedButtonsPadding, 15, calculatedButtonsPadding));
    myField.setBorder(Borders.empty(20, calculatedTextPadding, 5, calculatedTextPadding));
    add(myField, BorderLayout.CENTER);
    add(panel, BorderLayout.SOUTH);
    pack();
    setResizable(false);
    // Center the dialog over the owner window.
    setLocation(myOwner.getX() + (myOwner.getWidth() - this.getWidth()) / 2,
        myOwner.getY() + (myOwner.getHeight() - this.getHeight()) / 2);
    myIsInitialized = true;
  }

  /** Adds a button; the dialog must be re-initialized before showing. */
  public void addButton(JButton button) {
    myButtons.add(button);
    myIsInitialized = false;
  }

  @Override
  public void setVisible(boolean b) {
    // Guard against showing the dialog before/without initializeUI().
    assert !b || myIsInitialized;
    super.setVisible(b);
  }

  /**
   * Action that navigates to the source node of a typesystem rule. With a
   * single rule id it navigates directly; with several it shows a popup menu
   * with one nested GoToRuleAction per rule.
   */
  private static class GoToRuleAction extends AbstractAction {
    private final List<TypesystemRuleId> myRuleIds;
    private final MPSErrorDialog myDialog;
    private final JButton myButton;
    private final Project myProject;

    public GoToRuleAction(String message, List<TypesystemRuleId> ruleIds, MPSErrorDialog dialog, JButton button, Project project) {
      super(message);
      myRuleIds = ruleIds;
      myDialog = dialog;
      myButton = button;
      myProject = project;
    }

    @Override
    public void actionPerformed(ActionEvent e) {
      if (myRuleIds.size() > 1) {
        JBPopupMenu popupMenu = new JBPopupMenu();
        List<TypesystemRuleId> ruleIds = new ArrayList<>(myRuleIds);
        // Pop items off the tail until only the first ("immediate") rule
        // remains; each becomes a single-rule GoToRuleAction menu entry.
        while (ruleIds.size() > 1) {
          TypesystemRuleId ruleId = ruleIds.remove(ruleIds.size() - 1);
          popupMenu.add(new GoToRuleAction("Go To Rule " + ruleId, Collections.singletonList(ruleId), myDialog, myButton, myProject));
        }
        popupMenu.add(new GoToRuleAction("Go To Immediate Rule", Collections.singletonList(ruleIds.remove(0)), myDialog, myButton, myProject));
        popupMenu.show(myButton, 0, myButton.getHeight());
      } else {
        SNodeReference sourceNode = myRuleIds.get(0).getSourceNode();
        if (sourceNode == null) {
          Messages.showWarningDialog(((MPSProject) myProject).getProject(), "Impossible to find rule source node", "No Rule Declaration");
          return;
        }
        new EditorNavigator(myProject).shallSelect(true).open(sourceNode);
        myDialog.dispose();
      }
    }
  }
}
package ru.job4j.tictactoe.cell;

/**
 * Factory for the three kinds of tic-tac-toe cells: free, X-marked and
 * O-marked.
 */
public interface ICellFactory {
    /**
     * Returns a free cell.
     *
     * @return Free cell.
     */
    ICell getFree();

    /**
     * Returns a cell with an X mark.
     *
     * @return Cell with X mark.
     */
    ICell getMarkX();

    /**
     * Returns a cell with an O mark.
     *
     * @return Cell with O mark.
     */
    ICell getMarkO();
}
package cat.xarxarepublicana.hashtagsxrep.infrastructure.repository.jdbc;

import cat.xarxarepublicana.hashtagsxrep.domain.user.User;
import cat.xarxarepublicana.hashtagsxrep.domain.user.UserRepository;
import cat.xarxarepublicana.hashtagsxrep.infrastructure.repository.jdbc.mapper.UserMapper;

import java.util.List;

/**
 * JDBC-backed {@link UserRepository} that delegates all SQL to a
 * {@link UserMapper}. Save operations are upserts: an existing row is
 * updated, otherwise a new row is inserted.
 */
public class JdbcUserRepository implements UserRepository {

    private final UserMapper mapper;

    public JdbcUserRepository(UserMapper userMapper) {
        this.mapper = userMapper;
    }

    /** Looks a user up by primary id. */
    @Override
    public User findById(String id) {
        return mapper.selectOneById(id);
    }

    /** Looks a user up by Twitter nickname. */
    @Override
    public User findByNickname(String nickname) {
        return mapper.selectOneByNickname(nickname);
    }

    /** Upserts a user that just logged in, refreshing credential data. */
    @Override
    public void saveLoggedUser(User user) {
        if (!mapper.exists(user.getId())) {
            mapper.insert(user);
            return;
        }
        mapper.updateCredentialsData(user);
    }

    /** Upserts a user discovered via extraction, refreshing Twitter data. */
    @Override
    public void saveExtractedUser(User user) {
        if (!mapper.exists(user.getId())) {
            mapper.insert(user);
            return;
        }
        mapper.updateTwitterData(user);
    }

    /** Lists all members of the given group. */
    @Override
    public List<User> findByGroupId(String groupId) {
        return mapper.selectByMembership(groupId);
    }
}
/*
 * Tencent is pleased to support the open source community by making QMUI_Android available.
 *
 * Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
 *
 * Licensed under the MIT License (the "License"); you may not use this file except in
 * compliance with the License. You may obtain a copy of the License at
 *
 * http://opensource.org/licenses/MIT
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.qmuiteam.qmui.layout;

import android.content.Context;
import android.graphics.Canvas;
import androidx.annotation.ColorInt;
import android.util.AttributeSet;

import com.qmuiteam.qmui.alpha.QMUIAlphaButton;

/**
 * Created by cgspine on 2018/3/1.
 *
 * Button that implements {@link IQMUILayout} by delegating every divider,
 * radius and shadow operation to an internal {@link QMUILayoutHelper};
 * most mutators call {@code invalidate()} afterwards to trigger a repaint.
 */
public class QMUIButton extends QMUIAlphaButton implements IQMUILayout {
    // Performs all layout decoration work (dividers, radius, shadow) on
    // behalf of this view.
    private QMUILayoutHelper mLayoutHelper;

    public QMUIButton(Context context) {
        super(context);
        init(context, null, 0);
    }

    public QMUIButton(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs, 0);
    }

    public QMUIButton(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context, attrs, defStyleAttr);
    }

    /** Shared constructor body: create the helper and disable alpha effects. */
    private void init(Context context, AttributeSet attrs, int defStyleAttr) {
        mLayoutHelper = new QMUILayoutHelper(context, attrs, defStyleAttr, this);
        setChangeAlphaWhenDisable(false);
        setChangeAlphaWhenPress(false);
    }

    @Override
    public void updateTopDivider(int topInsetLeft, int topInsetRight, int topDividerHeight, int topDividerColor) {
        mLayoutHelper.updateTopDivider(topInsetLeft, topInsetRight, topDividerHeight, topDividerColor);
        invalidate();
    }

    @Override
    public void updateBottomDivider(int bottomInsetLeft, int bottomInsetRight, int bottomDividerHeight, int bottomDividerColor) {
        mLayoutHelper.updateBottomDivider(bottomInsetLeft, bottomInsetRight, bottomDividerHeight, bottomDividerColor);
        invalidate();
    }

    @Override
    public void updateLeftDivider(int leftInsetTop, int leftInsetBottom, int leftDividerWidth, int leftDividerColor) {
        mLayoutHelper.updateLeftDivider(leftInsetTop, leftInsetBottom, leftDividerWidth, leftDividerColor);
        invalidate();
    }

    // NOTE(review): unlike its siblings this method carries no @Override in
    // the original source — confirm whether that is intentional.
    public void updateRightDivider(int rightInsetTop, int rightInsetBottom, int rightDividerWidth, int rightDividerColor) {
        mLayoutHelper.updateRightDivider(rightInsetTop, rightInsetBottom, rightDividerWidth, rightDividerColor);
        invalidate();
    }

    @Override
    public void onlyShowTopDivider(int topInsetLeft, int topInsetRight, int topDividerHeight, int topDividerColor) {
        mLayoutHelper.onlyShowTopDivider(topInsetLeft, topInsetRight, topDividerHeight, topDividerColor);
        invalidate();
    }

    @Override
    public void onlyShowBottomDivider(int bottomInsetLeft, int bottomInsetRight, int bottomDividerHeight, int bottomDividerColor) {
        mLayoutHelper.onlyShowBottomDivider(bottomInsetLeft, bottomInsetRight, bottomDividerHeight, bottomDividerColor);
        invalidate();
    }

    @Override
    public void onlyShowLeftDivider(int leftInsetTop, int leftInsetBottom, int leftDividerWidth, int leftDividerColor) {
        mLayoutHelper.onlyShowLeftDivider(leftInsetTop, leftInsetBottom, leftDividerWidth, leftDividerColor);
        invalidate();
    }

    @Override
    public void onlyShowRightDivider(int rightInsetTop, int rightInsetBottom, int rightDividerWidth, int rightDividerColor) {
        mLayoutHelper.onlyShowRightDivider(rightInsetTop, rightInsetBottom, rightDividerWidth, rightDividerColor);
        invalidate();
    }

    @Override
    public void setTopDividerAlpha(int dividerAlpha) {
        mLayoutHelper.setTopDividerAlpha(dividerAlpha);
        invalidate();
    }

    @Override
    public void setBottomDividerAlpha(int dividerAlpha) {
        mLayoutHelper.setBottomDividerAlpha(dividerAlpha);
        invalidate();
    }

    @Override
    public void setLeftDividerAlpha(int dividerAlpha) {
        mLayoutHelper.setLeftDividerAlpha(dividerAlpha);
        invalidate();
    }

    @Override
    public void setRightDividerAlpha(int dividerAlpha) {
        mLayoutHelper.setRightDividerAlpha(dividerAlpha);
        invalidate();
    }

    @Override
    public void setHideRadiusSide(int hideRadiusSide) {
        mLayoutHelper.setHideRadiusSide(hideRadiusSide);
        invalidate();
    }

    @Override
    public int getHideRadiusSide() {
        return mLayoutHelper.getHideRadiusSide();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Let the helper adjust the specs first, measure, then re-measure if
        // the helper's minimum width/height constraints change the specs.
        widthMeasureSpec = mLayoutHelper.getMeasuredWidthSpec(widthMeasureSpec);
        heightMeasureSpec = mLayoutHelper.getMeasuredHeightSpec(heightMeasureSpec);
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int minW = mLayoutHelper.handleMiniWidth(widthMeasureSpec, getMeasuredWidth());
        int minH = mLayoutHelper.handleMiniHeight(heightMeasureSpec, getMeasuredHeight());
        if (widthMeasureSpec != minW || heightMeasureSpec != minH) {
            super.onMeasure(minW, minH);
        }
    }

    @Override
    public void setRadiusAndShadow(int radius, int shadowElevation, final float shadowAlpha) {
        mLayoutHelper.setRadiusAndShadow(radius, shadowElevation, shadowAlpha);
    }

    @Override
    public void setRadiusAndShadow(int radius, @QMUILayoutHelper.HideRadiusSide int hideRadiusSide, int shadowElevation, final float shadowAlpha) {
        mLayoutHelper.setRadiusAndShadow(radius, hideRadiusSide, shadowElevation, shadowAlpha);
    }

    @Override
    public void setRadiusAndShadow(int radius, int hideRadiusSide, int shadowElevation, int shadowColor, float shadowAlpha) {
        mLayoutHelper.setRadiusAndShadow(radius, hideRadiusSide, shadowElevation, shadowColor, shadowAlpha);
    }

    @Override
    public void setRadius(int radius) {
        mLayoutHelper.setRadius(radius);
    }

    @Override
    public void setRadius(int radius, @QMUILayoutHelper.HideRadiusSide int hideRadiusSide) {
        mLayoutHelper.setRadius(radius, hideRadiusSide);
    }

    @Override
    public int getRadius() {
        return mLayoutHelper.getRadius();
    }

    @Override
    public void setOutlineInset(int left, int top, int right, int bottom) {
        mLayoutHelper.setOutlineInset(left, top, right, bottom);
    }

    @Override
    public
void setBorderColor(@ColorInt int borderColor) { mLayoutHelper.setBorderColor(borderColor); invalidate(); } @Override public void setBorderWidth(int borderWidth) { mLayoutHelper.setBorderWidth(borderWidth); invalidate(); } @Override public void setShowBorderOnlyBeforeL(boolean showBorderOnlyBeforeL) { mLayoutHelper.setShowBorderOnlyBeforeL(showBorderOnlyBeforeL); invalidate(); } @Override public boolean setWidthLimit(int widthLimit) { if (mLayoutHelper.setWidthLimit(widthLimit)) { requestLayout(); invalidate(); } return true; } @Override public boolean setHeightLimit(int heightLimit) { if (mLayoutHelper.setHeightLimit(heightLimit)) { requestLayout(); invalidate(); } return true; } @Override public void setUseThemeGeneralShadowElevation() { mLayoutHelper.setUseThemeGeneralShadowElevation(); } @Override public void setOutlineExcludePadding(boolean outlineExcludePadding) { mLayoutHelper.setOutlineExcludePadding(outlineExcludePadding); } @Override public void setShadowElevation(int elevation) { mLayoutHelper.setShadowElevation(elevation); } @Override public int getShadowElevation() { return mLayoutHelper.getShadowElevation(); } @Override public void setShadowAlpha(float shadowAlpha) { mLayoutHelper.setShadowAlpha(shadowAlpha); } @Override public float getShadowAlpha() { return mLayoutHelper.getShadowAlpha(); } @Override public void setShadowColor(int shadowColor) { mLayoutHelper.setShadowColor(shadowColor); } @Override public int getShadowColor() { return mLayoutHelper.getShadowColor(); } @Override public void setOuterNormalColor(int color) { mLayoutHelper.setOuterNormalColor(color); } @Override protected void dispatchDraw(Canvas canvas) { super.dispatchDraw(canvas); mLayoutHelper.drawDividers(canvas, getWidth(), getHeight()); mLayoutHelper.dispatchRoundBorderDraw(canvas); } }
/* Copyright 2017 Zutubi Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.zutubi.tove.type; import com.zutubi.util.StringUtils; /** * Type used for enum-valued properties. They are similar to simple string * values, except are converted to the enums on instantiated objects and * allow more smarts (e.g. default presentation of options to select). */ public class EnumType extends SimpleType { public <T extends Enum<T>> EnumType(Class<T> clazz) { super(clazz); } @SuppressWarnings({"unchecked"}) public Class<? extends Enum> getClazz() { return (Class<? extends Enum>) super.getClazz(); } public Object instantiate(Object data, Instantiator instantiator) throws TypeException { String s = (String) data; if (StringUtils.stringSet(s)) { try { return Enum.valueOf(getClazz(), s); } catch (IllegalArgumentException e) { throw new TypeException("Illegal enumeration value '" + data.toString() + "'"); } } else { return null; } } public Object unstantiate(Object instance, String templateOwnerPath) throws TypeException { if (instance == null) { return ""; } else { return instance.toString(); } } public Object toXmlRpc(String templateOwnerPath, Object data) throws TypeException { // Leave it as is (a string). return data; } public String fromXmlRpc(String templateOwnerPath, Object data, boolean applyDefaults) throws TypeException { typeCheck(data, String.class); return (String) data; } }
/*
 * Copyright (c) 2008-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.cometd.javascript;

import org.junit.Assert;
import org.junit.Test;

/**
 * Verifies that a subscribe followed by a publish, both issued inside a
 * startBatch()/endBatch() pair in the JavaScript client, results in the
 * published message being delivered back to the subscriber.
 */
public class CometDBatchPublishTest extends AbstractCometDTest {
    @Test
    public void testBatchPublish() throws Exception {
        // Expose the Latch class to the scripting environment and create a
        // latch that the script counts down when the echoed message arrives.
        defineClass(Latch.class);
        evaluateScript("var latch = new Latch(1);");
        Latch latch = get("latch");
        // On the first successful /meta/connect, batch a subscribe + publish
        // to the same channel; endBatch() flushes both in one request.
        evaluateScript("" +
                "var _connected = false;" +
                "cometd.addListener('/meta/connect', function(message)" +
                "{" +
                "    var wasConnected = _connected;" +
                "    _connected = message.successful;" +
                "    if (!wasConnected && _connected)" +
                "    {" +
                "        cometd.startBatch();" +
                "        cometd.subscribe('/echo', latch, 'countDown');" +
                "        cometd.publish('/echo', 'test');" +
                "        cometd.endBatch();" +
                "    }" +
                "});" +
                "cometd.configure({url: '" + cometdURL + "', logLevel: '" + getLogLevel() + "'});" +
                "cometd.handshake();");
        // The latch fires only if the batched publish was delivered to the
        // batched subscription.
        Assert.assertTrue(latch.await(5000));

        evaluateScript("cometd.disconnect(true);");
    }
}
package org.testobject.kernel.imaging.segmentation;

import org.testobject.commons.math.algebra.Size;

/**
 * Helpers for building test fixtures for the segmentation code.
 *
 * @author enijkamp
 */
public final class TestUtils {

    private TestUtils() {
    }

    /**
     * Builds an {@link ArrayRaster} from an integer pixel grid: any non-zero
     * entry becomes {@code true}. An empty grid (no rows, or empty rows)
     * yields an empty raster of size 0x0.
     */
    public static ArrayRaster init(int[][] pixels) {
        if (pixels.length == 0 || pixels[0].length == 0) {
            return new ArrayRaster(new boolean[][] {}, new Size.Int(0, 0));
        }

        final int rows = pixels.length;
        final int cols = pixels[0].length;

        boolean[][] mask = new boolean[rows][cols];
        for (int row = 0; row < rows; row++) {
            for (int col = 0; col < cols; col++) {
                mask[row][col] = pixels[row][col] != 0;
            }
        }

        return new ArrayRaster(mask, new Size.Int(cols, rows));
    }
}
package com.distrimind.bouncycastle.pqc.jcajce.interfaces;

import java.security.Key;

/**
 * Marker interface tagging {@link java.security.Key} implementations as
 * NewHope (NH) post-quantum keys. Declares no members of its own.
 */
public interface NHKey
    extends Key
{
}
package com.github.liuweijw.api.security.auth.jwt; import java.util.List; import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import org.springframework.security.web.util.matcher.AntPathRequestMatcher; import org.springframework.security.web.util.matcher.OrRequestMatcher; import org.springframework.security.web.util.matcher.RequestMatcher; import org.springframework.util.Assert; /** * SkipPathRequestMatcher * * @author liuweijw */ public class SkipPathRequestMatcher implements RequestMatcher { private OrRequestMatcher matchers; private RequestMatcher processingMatcher; public SkipPathRequestMatcher(List<String> pathsToSkip, String processingPath) { Assert.notNull(pathsToSkip, "paths to skip is null"); List<RequestMatcher> m = pathsToSkip.stream() .map(path -> new AntPathRequestMatcher(path)) .collect(Collectors.toList()); matchers = new OrRequestMatcher(m); processingMatcher = new AntPathRequestMatcher(processingPath); } @Override public boolean matches(HttpServletRequest request) { if (matchers.matches(request)) { return false; } return processingMatcher.matches(request) ? true : false; } }
/* * Copyright (C) 2017-2018 Dremio Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.dremio.service.jobs; import com.dremio.service.job.proto.JobId; /** * Wrap {@code JobData} instance into another instance */ public class JobDataWrapper implements JobData { private final JobData delegate; public JobDataWrapper(JobData delegate) { this.delegate = delegate; } @Override public void close() throws Exception { delegate.close(); } @Override public JobDataFragment range(int offset, int limit) { return delegate.range(offset, limit); } @Override public JobDataFragment truncate(int maxRows) { return delegate.truncate(maxRows); } @Override public JobId getJobId() { return delegate.getJobId(); } @Override public String getJobResultsTable() { return delegate.getJobResultsTable(); } @Override public void loadIfNecessary() { delegate.loadIfNecessary(); } }
/** * Copyright (C) 2010-2013 Alibaba Group Holding Limited * <p/> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.rocketmq.tools.command.topic; import com.alibaba.rocketmq.client.exception.MQClientException; import com.alibaba.rocketmq.common.MixAll; import com.alibaba.rocketmq.common.UtilAll; import com.alibaba.rocketmq.common.protocol.body.ClusterInfo; import com.alibaba.rocketmq.common.protocol.body.GroupList; import com.alibaba.rocketmq.common.protocol.body.TopicList; import com.alibaba.rocketmq.common.protocol.route.BrokerData; import com.alibaba.rocketmq.common.protocol.route.TopicRouteData; import com.alibaba.rocketmq.remoting.RPCHook; import com.alibaba.rocketmq.remoting.exception.RemotingException; import com.alibaba.rocketmq.tools.admin.DefaultMQAdminExt; import com.alibaba.rocketmq.tools.command.SubCommand; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import java.util.Iterator; import java.util.Map.Entry; import java.util.Set; /** * 查看Topic统计信息,包括offset、最后更新时间 * * @author shijia.wxr<vintage.wang@gmail.com> * @since 2013-8-3 */ public class TopicListSubCommand implements SubCommand { @Override public String commandName() { return "topicList"; } @Override public String commandDesc() { return "Fetch all topic list from name server"; } @Override public Options buildCommandlineOptions(Options options) { Option opt = new Option("c", "clusterModel", false, 
"clusterModel"); opt.setRequired(false); options.addOption(opt); return options; } private String findTopicBelongToWhichCluster(final String topic, final ClusterInfo clusterInfo, final DefaultMQAdminExt defaultMQAdminExt) throws RemotingException, MQClientException, InterruptedException { TopicRouteData topicRouteData = defaultMQAdminExt.examineTopicRouteInfo(topic); BrokerData brokerData = topicRouteData.getBrokerDatas().get(0); String brokerName = brokerData.getBrokerName(); Iterator<Entry<String, Set<String>>> it = clusterInfo.getClusterAddrTable().entrySet().iterator(); while (it.hasNext()) { Entry<String, Set<String>> next = it.next(); if (next.getValue().contains(brokerName)) { return next.getKey(); } } return null; } @Override public void execute(final CommandLine commandLine, final Options options, RPCHook rpcHook) { DefaultMQAdminExt defaultMQAdminExt = new DefaultMQAdminExt(rpcHook); defaultMQAdminExt.setInstanceName(Long.toString(System.currentTimeMillis())); try { defaultMQAdminExt.start(); if (commandLine.hasOption('c')) { ClusterInfo clusterInfo = defaultMQAdminExt.examineBrokerClusterInfo(); System.out.printf("%-20s %-48s %-48s\n",// "#Cluster Name",// "#Topic",// "#Consumer Group"// ); TopicList topicList = defaultMQAdminExt.fetchAllTopicList(); for (String topic : topicList.getTopicList()) { if (topic.startsWith(MixAll.RETRY_GROUP_TOPIC_PREFIX) || topic.startsWith(MixAll.DLQ_GROUP_TOPIC_PREFIX)) { continue; } String clusterName = ""; GroupList groupList = new GroupList(); try { clusterName = this.findTopicBelongToWhichCluster(topic, clusterInfo, defaultMQAdminExt); groupList = defaultMQAdminExt.queryTopicConsumeByWho(topic); } catch (Exception e) { } if (null == groupList || groupList.getGroupList().isEmpty()) { groupList = new GroupList(); groupList.getGroupList().add(""); } for (String group : groupList.getGroupList()) { System.out.printf("%-20s %-48s %-48s\n",// UtilAll.frontStringAtLeast(clusterName, 20),// UtilAll.frontStringAtLeast(topic, 
48),// UtilAll.frontStringAtLeast(group, 48)// ); } } } else { TopicList topicList = defaultMQAdminExt.fetchAllTopicList(); for (String topic : topicList.getTopicList()) { System.out.println(topic); } } } catch (Exception e) { e.printStackTrace(); } finally { defaultMQAdminExt.shutdown(); } } }
/* * Copyright (c) 2020. Self learning and applying project advanced concepts through out. */ package com.dhiren.springboot.mongodbdemo.mongo.config; import com.dhiren.springboot.mongodbdemo.mongo.app.MultipleClientConfig; import com.mongodb.*; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoClients; import com.mongodb.connection.ServerSettings; import com.mongodb.connection.SslSettings; import com.mongodb.event.ServerListenerAdapter; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.data.domain.Sort; import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.config.AbstractMongoClientConfiguration; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.index.Index; import org.springframework.data.mongodb.repository.config.EnableMongoRepositories; import java.util.Arrays; import static com.dhiren.springboot.mongodbdemo.mongo.constants.AppConstants.*; @Configuration @EnableMongoRepositories(basePackages = "com.dhiren.springboot.mongodbdemo.mongo.repo") public class MongoTemplateConfig extends AbstractMongoClientConfiguration { private MultipleClientConfig clientConfig; @Autowired public MongoTemplateConfig(MultipleClientConfig clientConfig) { this.clientConfig = clientConfig; } private static void apply(SslSettings.Builder builder) { builder.applySettings(SslSettings.builder().enabled(false).build()); } @Override public MongoClient mongoClient() { MongoCredential credential = MongoCredential.createCredential( USERNAME.getType(), getDatabaseName(), PASSWORD.getType().toCharArray()); 
return MongoClients.create(MongoClientSettings.builder() .applyToSslSettings(MongoTemplateConfig::apply) .credential(credential) .build()); } @Override protected String getDatabaseName() { return DATABASE_NAME.getType(); } @Bean public MongoTemplate mongoTemplate() { final Index index = new Index(TTL_COLUMN.getType(), Sort.Direction.DESC); final MongoTemplate mongoTemplate = new MongoTemplate(mongoDbFactory()); clientConfig.getClients().forEach(client -> { index.expire(Integer.valueOf(client.getTtl())); mongoTemplate.indexOps(client.getCollection()).ensureIndex(index); }); MappingMongoConverter converter = (MappingMongoConverter) mongoTemplate.getConverter(); converter.setCustomConversions(this.mongoCustomConversions()); converter.afterPropertiesSet(); return mongoTemplate; } @Bean public MongoDbFactory mongoDbFactory() { return new SimpleMongoClientDbFactory(mongoClient(), getDatabaseName()); } @Bean public MongoCustomConversions mongoCustomConversions() { return new MongoCustomConversions( Arrays.asList(new MongoReadConverter(), new MongoWriteConverter()) ); } }
/*
 * The MIT License
 *
 * Copyright 2015 misakura.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package jp.gr.java_conf.kgd.library.water.java.core.value;

/**
 * A {@code LongSize2} variant that fixes the value type to {@link Long} and
 * supplies default behavior for the other accessors.
 *
 * This interface only provides default implementations; as a rule it should
 * not be used as an interface in API signatures.
 *
 * @author misakura
 */
public interface LongSize2Trait extends ObjectSize2<Long>, NumberSize2 {

    /**
     * Returns the width of this size.
     *
     * The default implementation delegates to {@link #getWidthAsLong()} and
     * returns the obtained value with a static (narrowing) cast.
     *
     * @return {@inheritDoc}
     */
    @Override
    default int getWidthAsInt() {
        return (int) getWidthAsLong();
    }

    /**
     * Returns the height of this size.
     *
     * The default implementation delegates to {@link #getHeightAsLong()} and
     * returns the obtained value with a static (narrowing) cast.
     *
     * @return {@inheritDoc}
     */
    @Override
    default int getHeightAsInt() {
        return (int) getHeightAsLong();
    }

    /**
     * Returns the width of this size.
     *
     * The default implementation delegates to {@link #getWidthAsLong()} and
     * returns the obtained value with a static (widening) cast.
     *
     * @return {@inheritDoc}
     */
    @Override
    default double getWidthAsDouble() {
        return (double) getWidthAsLong();
    }

    /**
     * Returns the height of this size.
     *
     * The default implementation delegates to {@link #getHeightAsLong()} and
     * returns the obtained value with a static (widening) cast.
     *
     * @return {@inheritDoc}
     */
    @Override
    default double getHeightAsDouble() {
        return (double) getHeightAsLong();
    }

    /**
     * Returns the width of this size.
     *
     * Whether the obtained value is a defensive copy is implementation
     * dependent.<br>
     * The default implementation delegates to {@link #getWidthAsLong()} and
     * returns the obtained value boxed (i.e. it performs a defensive copy).
     *
     * @return {@inheritDoc}
     * @deprecated Code working directly with this interface should use
     *             {@link #getWidthAsLong()} instead.
     */
    @Deprecated
    @Override
    default Long getWidth() {
        return getWidthAsLong();
    }

    /**
     * Returns the height of this size.
     *
     * Whether the obtained value is a defensive copy is implementation
     * dependent.<br>
     * The default implementation delegates to {@link #getHeightAsLong()} and
     * returns the obtained value boxed (i.e. it performs a defensive copy).
     *
     * @return {@inheritDoc}
     * @deprecated Code working directly with this interface should use
     *             {@link #getHeightAsLong()} instead.
     */
    @Deprecated
    @Override
    default Long getHeight() {
        return getHeightAsLong();
    }
}
package com.cy.mobileInterface.around.service;

import com.cy.base.entity.Message;

/**
 * Service contract for "nearby user" lookups in the mobile interface layer.
 */
public interface AroundService {

    /**
     * Finds users near the caller.
     *
     * NOTE(review): the semantics of the parameters are not visible here —
     * presumably {@code message} carries the request/response envelope and
     * {@code content} the location payload; confirm against implementations.
     */
    void getNearbyUser(Message message, String content);
}
package cn.lovepet.shops.helper.basequickadapter.listener;

import android.view.View;

import cn.lovepet.shops.helper.basequickadapter.BaseQuickAdapter;

/**
 * Created by AllenCoder on 2016/8/03.
 *
 * A convenience class to extend when you only care about child-view long
 * clicks out of all the {@link SimpleClickListener} callbacks. All other
 * callbacks are implemented as no-ops; only
 * {@link #onItemChildLongClick(BaseQuickAdapter, View, int)} is forwarded, to
 * the abstract {@link #onSimpleItemChildLongClick(BaseQuickAdapter, View, int)}.
 **/
public abstract class OnItemChildLongClickListener extends SimpleClickListener {

    // No-op: whole-item clicks are ignored by this listener.
    @Override
    public void onItemClick(BaseQuickAdapter adapter, View view, int position) {
    }

    // No-op: whole-item long clicks are ignored by this listener.
    @Override
    public void onItemLongClick(BaseQuickAdapter adapter, View view, int position) {
    }

    // No-op: child-view clicks are ignored by this listener.
    @Override
    public void onItemChildClick(BaseQuickAdapter adapter, View view, int position) {
    }

    // The only event this listener handles: forward to the subclass hook.
    @Override
    public void onItemChildLongClick(BaseQuickAdapter adapter, View view, int position) {
        onSimpleItemChildLongClick(adapter, view, position);
    }

    /**
     * Called when a child view inside an item is long-clicked.
     */
    public abstract void onSimpleItemChildLongClick(BaseQuickAdapter adapter, View view, int position);
}
/*
 * Copyright (C) 2017-2019 HERE Europe B.V.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * SPDX-License-Identifier: Apache-2.0
 * License-Filename: LICENSE
 */

package com.here.xyz.hub.rest;

import static com.here.xyz.hub.rest.Api.HeaderValues.APPLICATION_JSON;
import static com.jayway.restassured.RestAssured.given;
import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
import static io.netty.handler.codec.http.HttpResponseStatus.OK;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Integration tests for PATCH /spaces/{id}: searchable properties, removal of
 * listeners/processors/storage, and processor behavior on modified spaces.
 * Each test runs against a freshly created "x-psql-test" space.
 */
public class ModifySpaceApiIT extends TestSpaceWithFeature {

  @BeforeClass
  public static void setupClass() {
    // Clean up any leftover space from a previous (possibly aborted) run.
    remove();
  }

  @Before
  public void setup() {
    createSpace();
  }

  @After
  public void tearDown() {
    removeSpace("x-psql-test");
  }

  // Patching searchableProperties with a valid payload is accepted and echoed back.
  @Test
  public void setSearchablePropertiesPositive() {
    given()
        .contentType(APPLICATION_JSON)
        .accept(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_ALL))
        .body(content("/xyz/hub/updateSpaceWithSearchableProperties.json"))
        .when()
        .patch("/spaces/x-psql-test")
        .then()
        .statusCode(OK.code())
        .body("searchableProperties.name", equalTo(true))
        .body("searchableProperties.other", equalTo(false));
  }

  // A payload targeting a connector that does not support it is rejected.
  @Test
  public void setSearchablePropertiesNegative() {
    given()
        .contentType(APPLICATION_JSON)
        .accept(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_ALL))
        .body(content("/xyz/hub/updateSpaceWithSearchablePropertiesConnectorC1.json"))
        .when()
        .patch("/spaces/x-psql-test")
        .then()
        .statusCode(BAD_REQUEST.code());
  }

  // Patching listeners to null clears them, both in the PATCH response and on a
  // subsequent GET.
  @Test
  public void removeAllListeners() {
    addListener("x-psql-test");

    given()
        .contentType(APPLICATION_JSON)
        .accept(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN))
        .body("{\"listeners\": null}")
        .when()
        .patch("/spaces/x-psql-test")
        .then()
        .statusCode(OK.code())
        .body("listeners", nullValue());

    given()
        .accept(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN))
        .when()
        .get("/spaces/x-psql-test")
        .then()
        .statusCode(OK.code())
        .body("listeners", nullValue());
  }

  // Same as removeAllListeners, but for processors.
  @Test
  public void removeAllProcessors() {
    addProcessor("x-psql-test");

    given()
        .contentType(APPLICATION_JSON)
        .accept(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN))
        .body("{\"processors\": null}")
        .when()
        .patch("/spaces/x-psql-test")
        .then()
        .statusCode(OK.code())
        .body("processors", nullValue());

    given()
        .accept(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN))
        .when()
        .get("/spaces/x-psql-test")
        .then()
        .statusCode(OK.code())
        .body("processors", nullValue());
  }

  // A processor added to an existing space shows up (with its event types) on GET.
  @Test
  public void addProcessorToExistingSpace() {
    addProcessor("x-psql-test");

    given()
        .accept(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN))
        .when()
        .get("/spaces/x-psql-test")
        .then()
        .statusCode(OK.code())
        .body("processors", notNullValue())
        .body("processors.size()", is(1))
        .body("processors.rule-tagger", notNullValue())
        .body("processors.rule-tagger.size()", is(1))
        .body("processors.rule-tagger[0].eventTypes.size()", is(2));
  }

  // After a PATCH on a space with a processor, the response still contains the
  // full space view: title, new description, storage, and the processor config.
  @Test
  public void testConnectorResponseInModifiedSpace() {
    addProcessor("x-psql-test");

    given()
        .accept(APPLICATION_JSON)
        .contentType(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_WITH_ACCESS_CONNECTOR_RULE_TAGGER))
        .body("{\"description\": \"Added description\"}")
        .when()
        .patch("/spaces/x-psql-test")
        .then()
        .statusCode(OK.code())
        .body("title", is("Test Space Processor"))
        .body("description", is("Added description"))
        .body("storage", notNullValue())
        .body("storage.id", is("psql"))
        .body("processors", notNullValue())
        .body("processors.size()", is(1))
        .body("processors.rule-tagger", notNullValue())
        .body("processors.rule-tagger.size()", is(1))
        .body("processors.rule-tagger[0].eventTypes.size()", is(2));
  }

  // Storage cannot be removed from a space: patching it to null is a bad request.
  @Test
  public void testRemoveStorage() {
    given()
        .accept(APPLICATION_JSON)
        .contentType(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_WITH_ACCESS_CONNECTOR_RULE_TAGGER))
        .body("{\"storage\": null}")
        .when()
        .patch("/spaces/x-psql-test")
        .then()
        .statusCode(BAD_REQUEST.code());
  }

  // Patching a field to its current value is a no-op but still succeeds.
  @Test
  public void patchWithoutChange() {
    given()
        .accept(APPLICATION_JSON)
        .contentType(APPLICATION_JSON)
        .headers(getAuthHeaders(AuthProfile.ACCESS_OWNER_1_ADMIN))
        .body("{\"title\": \"My Demo Space\"}")
        .when()
        .patch("/spaces/x-psql-test")
        .then()
        .statusCode(OK.code());
  }
}
package com.savl.ripple.core.types.known.sle.entries; import com.savl.ripple.core.coretypes.AccountID; import com.savl.ripple.core.coretypes.Currency; import com.savl.ripple.core.coretypes.Vector256; import com.savl.ripple.core.coretypes.hash.Hash160; import com.savl.ripple.core.coretypes.hash.Hash256; import com.savl.ripple.core.coretypes.hash.Index; import com.savl.ripple.core.coretypes.uint.UInt64; import com.savl.ripple.core.fields.Field; import com.savl.ripple.core.serialized.enums.LedgerEntryType; import com.savl.ripple.core.types.known.sle.LedgerEntry; public class DirectoryNode extends LedgerEntry { public DirectoryNode() { super(LedgerEntryType.DirectoryNode); } public UInt64 indexNext() {return get(UInt64.IndexNext);} public UInt64 indexPrevious() {return get(UInt64.IndexPrevious);} public UInt64 exchangeRate() {return get(UInt64.ExchangeRate);} public Hash256 rootIndex() {return get(Hash256.RootIndex);} public AccountID owner() {return get(AccountID.Owner);} public Hash160 takerPaysCurrency() {return get(Hash160.TakerPaysCurrency);} public Hash160 takerPaysIssuer() {return get(Hash160.TakerPaysIssuer);} public Hash160 takerGetsCurrency() {return get(Hash160.TakerGetsCurrency);} public Hash160 takerGetsIssuer() {return get(Hash160.TakerGetsIssuer);} public Vector256 indexes() {return get(Vector256.Indexes);} public void indexNext(UInt64 val) {put(Field.IndexNext, val);} public void indexPrevious(UInt64 val) {put(Field.IndexPrevious, val);} public void exchangeRate(UInt64 val) {put(Field.ExchangeRate, val);} public void rootIndex(Hash256 val) {put(Field.RootIndex, val);} public void owner(AccountID val) {put(Field.Owner, val);} public void takerPaysCurrency(Hash160 val) {put(Field.TakerPaysCurrency, val);} public void takerPaysIssuer(Hash160 val) {put(Field.TakerPaysIssuer, val);} public void takerGetsCurrency(Hash160 val) {put(Field.TakerGetsCurrency, val);} public void takerGetsIssuer(Hash160 val) {put(Field.TakerGetsIssuer, val);} public void 
indexes(Vector256 val) {put(Field.Indexes, val);} public Hash256 nextIndex() { return Index.directoryNode(rootIndex(), indexNext()); } public Hash256 prevIndex() { return Index.directoryNode(rootIndex(), indexPrevious()); } public boolean hasPreviousIndex() { return indexPrevious() != null && !indexPrevious().isZero(); } public boolean hasNextIndex() { return indexNext() != null && !indexNext().isZero(); } public boolean isRootIndex() { return rootIndex().equals(index()); } public void setExchangeDefaults() { if (takerGetsCurrency() == null) { takerGetsCurrency(Currency.XRP); takerGetsIssuer(AccountID.XRP_ISSUER); } else if (takerPaysCurrency() == null) { takerPaysCurrency(Currency.XRP); takerPaysIssuer(AccountID.XRP_ISSUER); } } @Override public void setDefaults() { super.setDefaults(); if (exchangeRate() != null) { setExchangeDefaults(); } if (indexes() == null) { indexes(new Vector256()); } } }
package frc.robot.commands.intake;

import edu.wpi.first.wpilibj2.command.CommandBase;
import frc.robot.subsystems.Intake;

/**
 * While scheduled, keeps the intake lowered and collecting; on end (or
 * interruption) raises the intake and stops the rollers.
 */
public class moveDownAndIntake extends CommandBase {

    Intake m_intake;

    /**
     * @param intake the intake subsystem this command requires
     */
    public moveDownAndIntake(Intake intake) {
        this.m_intake = intake;
        addRequirements(intake);
    }

    @Override
    public void execute() {
        // Repeatedly commanded every scheduler cycle while the command runs.
        m_intake.intakeDown();
        m_intake.collectBalls();
    }

    @Override
    public void end(boolean interrupted) {
        // Same cleanup whether the command finished or was interrupted.
        m_intake.intakeUp();
        m_intake.stop();
    }
}
/*
 * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.simpleworkflow.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request to describe a workflow type; carries the registration domain and the
 * workflow type to look up.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeWorkflowTypeRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The name of the domain in which this workflow type is registered. */
    private String domain;

    /** The workflow type to describe. */
    private WorkflowType workflowType;

    /**
     * Sets the name of the domain in which this workflow type is registered.
     *
     * @param domain the domain name
     */
    public void setDomain(String domain) {
        this.domain = domain;
    }

    /**
     * @return the name of the domain in which this workflow type is registered
     */
    public String getDomain() {
        return this.domain;
    }

    /**
     * Fluent variant of {@link #setDomain(String)}.
     *
     * @param domain the domain name
     * @return this request, for call chaining
     */
    public DescribeWorkflowTypeRequest withDomain(String domain) {
        setDomain(domain);
        return this;
    }

    /**
     * Sets the workflow type to describe.
     *
     * @param workflowType the workflow type
     */
    public void setWorkflowType(WorkflowType workflowType) {
        this.workflowType = workflowType;
    }

    /**
     * @return the workflow type to describe
     */
    public WorkflowType getWorkflowType() {
        return this.workflowType;
    }

    /**
     * Fluent variant of {@link #setWorkflowType(WorkflowType)}.
     *
     * @param workflowType the workflow type
     * @return this request, for call chaining
     */
    public DescribeWorkflowTypeRequest withWorkflowType(WorkflowType workflowType) {
        setWorkflowType(workflowType);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getDomain() != null)
            sb.append("Domain: ").append(getDomain()).append(",");
        if (getWorkflowType() != null)
            sb.append("WorkflowType: ").append(getWorkflowType());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeWorkflowTypeRequest)) {
            return false;
        }
        DescribeWorkflowTypeRequest other = (DescribeWorkflowTypeRequest) obj;
        // Fields are equal when both are null or both non-null and equal.
        if (other.getDomain() == null ^ this.getDomain() == null) {
            return false;
        }
        if (other.getDomain() != null && !other.getDomain().equals(this.getDomain())) {
            return false;
        }
        if (other.getWorkflowType() == null ^ this.getWorkflowType() == null) {
            return false;
        }
        if (other.getWorkflowType() != null && !other.getWorkflowType().equals(this.getWorkflowType())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        // Same formula as the generated original: null contributes 0.
        hashCode = prime * hashCode + ((getDomain() == null) ? 0 : getDomain().hashCode());
        hashCode = prime * hashCode + ((getWorkflowType() == null) ? 0 : getWorkflowType().hashCode());
        return hashCode;
    }

    @Override
    public DescribeWorkflowTypeRequest clone() {
        return (DescribeWorkflowTypeRequest) super.clone();
    }
}
/**
 */
package gluemodel.COSEM.COSEMObjects.impl;

import gluemodel.COSEM.COSEMObjects.COSEMObjectsPackage;
import gluemodel.COSEM.COSEMObjects.SAPAssignmentCurrent;
import gluemodel.COSEM.InterfaceClasses.impl.SAPAssignmentImpl;

import org.eclipse.emf.ecore.EClass;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>SAP Assignment Current</b></em>'.
 * EMF-generated: delegates all behavior to {@link SAPAssignmentImpl} and only
 * binds the concrete EMF metaclass.
 * <!-- end-user-doc -->
 *
 * @generated
 */
public class SAPAssignmentCurrentImpl extends SAPAssignmentImpl implements SAPAssignmentCurrent {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected SAPAssignmentCurrentImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * Returns the static EMF metaclass for this model object.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return COSEMObjectsPackage.eINSTANCE.getSAPAssignmentCurrent();
    }

} //SAPAssignmentCurrentImpl
package subtypes;

import org.junit.Test;

import subtypes.api.SkillFile;
import common.CommonTest;
import de.ust.skill.common.java.api.SkillFile.Mode;

/**
 * Simple write tests.
 *
 * @author Timm Felden
 */
@SuppressWarnings("static-method")
public class SimpleWrite extends CommonTest {

    /** Creates a single self-referential C instance and writes the file. */
    @Test
    public void createAndWrite() throws Exception {
        SkillFile file = SkillFile.open(tmpFile("createAndWrite"), Mode.Create, Mode.Write);
        C instance = file.Cs().make();
        instance.a = instance;
        instance.c = instance;
        file.close();
    }

    /** Creates an A wrapping a fresh B, wires B to itself, and writes the file. */
    @Test
    public void insertAB() throws Exception {
        SkillFile file = SkillFile.open(tmpFile("insertAB"), Mode.Create, Mode.Write);
        file.As().make(file.Bs().make());
        B firstB = file.Bs().iterator().next();
        firstB.a = firstB;
        firstB.b = firstB;
        file.close();
    }
}
/* * Copyright 2020 phyzicsz <phyzics.z@gmail.com>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.phyzicsz.rocket.symbol; import com.phyzicsz.rocket.symbol.common.SymbologyConstants; import com.phyzicsz.rocket.symbol.render.MilStdSymbolRenderer; import com.phyzicsz.rocket.symbol.common.SymbolServiceProperties; import java.awt.Color; import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import javax.imageio.ImageIO; /** * * @author phyzicsz <phyzics.z@gmail.com> */ public class RocketSymbolService { private final MilStdSymbolRenderer renderer = new MilStdSymbolRenderer(); // private final KVStore kv = new KVStore(); private final SymbolServiceProperties props = new SymbolServiceProperties(); public RocketSymbolService() { } public RocketSymbolService withShowIcon(final Boolean value) { props.put(SymbolServiceProperties.SHOW_ICON, value); return this; } public RocketSymbolService withShowFrame(final Boolean value) { props.put(SymbolServiceProperties.SHOW_FRAME, value); return this; } public RocketSymbolService withShowFill(final Boolean value) { props.put(SymbolServiceProperties.SHOW_FILL, value); return this; } public RocketSymbolService withFillColor(final Color value) { props.put(SymbolServiceProperties.COLOR, value); return this; } public BufferedImage asBufferedImage(final String symbolCode) throws IOException { return 
renderer.createIcon(symbolCode, props); } public byte[] asPng(final String symbolCode) throws IOException { BufferedImage image = renderer.createIcon(symbolCode, props); byte[] bytes; try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { ImageIO.write(image, "png", baos); baos.flush(); bytes = baos.toByteArray(); } return bytes; } public void pngToFile(final String symbolCode, final String path) throws IOException { BufferedImage image = renderer.createIcon(symbolCode, props); byte[] bytes; try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { ImageIO.write(image, "png", baos); baos.flush(); bytes = baos.toByteArray(); } Path filepath = Paths.get(path); Files.write(filepath, bytes); } public void pngToFile(final String symbolCode, final Path path) throws IOException { BufferedImage image = renderer.createIcon(symbolCode, props); byte[] bytes; try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) { ImageIO.write(image, "png", baos); baos.flush(); bytes = baos.toByteArray(); } Files.write(path, bytes); } }
/* Copyright (C) 2013-2021 TU Dortmund * This file is part of LearnLib, http://www.learnlib.de/. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.learnlib.filter.reuse.test; import java.util.function.Supplier; import de.learnlib.algorithms.lstar.mealy.ExtensibleLStarMealyBuilder; import de.learnlib.api.algorithm.LearningAlgorithm.MealyLearner; import de.learnlib.filter.reuse.ReuseCapableOracle; import de.learnlib.filter.reuse.ReuseOracle; import net.automatalib.words.Alphabet; import net.automatalib.words.Word; import net.automatalib.words.WordBuilder; import net.automatalib.words.impl.Alphabets; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; /** * Similar to the {@link LearningTest} but this time with quiescence in outputs. The purpose of this test is just to * check that the reuse filter is able to work with {@code null} outputs. * * @author Oliver Bauer */ public class QuiescenceTest { private ReuseOracle<Integer, Integer, String> reuseOracle; private Alphabet<Integer> sigma; /** * {@inheritDoc}. 
*/ @BeforeClass protected void setUp() { sigma = Alphabets.integers(0, 3); reuseOracle = new ReuseOracle.ReuseOracleBuilder<>(sigma, new TestOracleFactory()).build(); } @Test public void simpleTest() { MealyLearner<Integer, String> learner = new ExtensibleLStarMealyBuilder<Integer, String>().withAlphabet(sigma).withOracle(reuseOracle).create(); learner.startLearning(); } private class TestOracleFactory implements Supplier<ReuseCapableOracle<Integer, Integer, String>> { @Override public ReuseCapableOracle<Integer, Integer, String> get() { return new TestOracle(); } } class TestOracle implements ReuseCapableOracle<Integer, Integer, String> { private final int threshold = 3; @Override public QueryResult<Integer, String> continueQuery(Word<Integer> trace, Integer s) { Integer integer = s; WordBuilder<String> output = new WordBuilder<>(); for (Integer symbol : trace) { if (integer + symbol < threshold) { integer += symbol; output.add("ok"); } else if (integer + symbol == threshold) { integer += symbol; output.add("done"); } else { output.add(null); // quiescence } } QueryResult<Integer, String> result; result = new QueryResult<>(output.toWord(), integer); return result; } @Override public QueryResult<Integer, String> processQuery(Word<Integer> trace) { Integer integer = 0; WordBuilder<String> output = new WordBuilder<>(); for (Integer symbol : trace) { if (integer + symbol < threshold) { integer += symbol; output.add("ok"); } else if (integer + symbol == threshold) { integer += symbol; output.add("done"); } else { output.add(null); // quiescence } } QueryResult<Integer, String> result; result = new QueryResult<>(output.toWord(), integer); return result; } } }
package io.github.janvinas.trensminecat.signactions; import com.bergerkiller.bukkit.common.map.MapDisplay; import com.bergerkiller.bukkit.tc.controller.MinecartGroup; import com.bergerkiller.bukkit.tc.events.SignActionEvent; import com.bergerkiller.bukkit.tc.events.SignChangeActionEvent; import com.bergerkiller.bukkit.tc.signactions.SignAction; import com.bergerkiller.bukkit.tc.signactions.SignActionType; import com.bergerkiller.bukkit.tc.utils.SignBuildOptions; import io.github.janvinas.trensminecat.ManualDisplays; import io.github.janvinas.trensminecat.ManualDisplay; import io.github.janvinas.trensminecat.TrensMinecat; import io.github.janvinas.trensminecat.trainTracker.TrackedTrain; import java.util.Collection; public class SignActionDisplayManual extends SignAction { @Override public boolean match(SignActionEvent info) { return info.isType("displaymanual"); } @Override public boolean canSupportRC() { return false; } @Override public void execute(SignActionEvent info) { String idDisplay = null; String via = null; String displayId = info.getLine(2); String[] line3 = displayId.split(" ", 2); idDisplay = line3[0]; try { via = line3[1]; } catch (Exception e){ via = null; } int clearIn; if(info.getLine(3).length() > 0) { clearIn = Integer.parseInt(info.getLine(3)); } else { clearIn = 0; } if(displayId == null) return; if (info.isTrainSign() && info.isAction(SignActionType.GROUP_ENTER)) { if (!info.isPowered()) return; MinecartGroup train = info.getGroup(); updateDisplay(idDisplay, via, train, clearIn); updateTrackedTrain(info.getGroup(), displayId); } } private boolean updateDisplay(String displayId, String via, MinecartGroup train, int clearIn){ Class<?>[] classes = ManualDisplays.class.getDeclaredClasses(); for (Class<?> c : classes) { @SuppressWarnings("unchecked") Collection<? 
extends ManualDisplay> displays = MapDisplay.getAllDisplays( (Class<ManualDisplay>) c); displays.forEach(display -> { display.updateInformation(displayId, via, train, clearIn); }); } return true; } private boolean updateTrackedTrain(MinecartGroup m, String displayId){ TrackedTrain train = TrensMinecat.getPlugin(TrensMinecat.class).trainTracker.searchTrain(m); if(train != null){ train.enterStation(displayId); return true; } return false; } @Override public boolean build(SignChangeActionEvent event) { if (!event.isType("displaymanual")) { return false; } return SignBuildOptions.create() .setName("displaymanual") .setDescription("Mostra informació del tren a una pantalla amb el nom donat.") .handle(event.getPlayer()); } }
package software.amazon.lightsail.loadbalancer.helpers.resource; import lombok.RequiredArgsConstructor; import lombok.val; import software.amazon.awssdk.awscore.AwsRequest; import software.amazon.awssdk.awscore.AwsResponse; import software.amazon.awssdk.services.lightsail.LightsailClient; import software.amazon.awssdk.services.lightsail.model.*; import software.amazon.cloudformation.exceptions.CfnNotFoundException; import software.amazon.cloudformation.proxy.Logger; import software.amazon.cloudformation.proxy.ProxyClient; import software.amazon.cloudformation.proxy.ResourceHandlerRequest; import software.amazon.lightsail.loadbalancer.ResourceModel; import java.util.HashSet; import java.util.Set; import static software.amazon.lightsail.loadbalancer.Translator.translateFromReadResponse; /** * Helper class to handle LoadBalancer operations. */ @RequiredArgsConstructor public class LoadBalancer implements ResourceHelper { private final ResourceModel resourceModel; private final Logger logger; private final ProxyClient<LightsailClient> proxyClient; private final ResourceHandlerRequest<ResourceModel> resourceModelRequest; @Override public AwsResponse update(AwsRequest request) { AwsResponse awsResponse = null; return awsResponse; } public AwsResponse updateAttributes(AwsRequest request) { AwsResponse awsResponse = null; ResourceModel currentResourceModel = getCurrentResourceModelFromLightsail(); ResourceModel desiredResourceModel = resourceModelRequest.getDesiredResourceState(); // Updating the HealthCheckPath attribute. 
if (desiredResourceModel.getHealthCheckPath() != null && !desiredResourceModel.getHealthCheckPath().equals(currentResourceModel.getHealthCheckPath())) { logger.log("Updating LoadBalancer attribute: HealthCheckPath"); proxyClient.injectCredentialsAndInvokeV2(UpdateLoadBalancerAttributeRequest.builder() .loadBalancerName(desiredResourceModel.getLoadBalancerName()).attributeName("HealthCheckPath") .attributeValue(desiredResourceModel.getHealthCheckPath()).build(), proxyClient.client()::updateLoadBalancerAttribute); } // Updating the SessionStickinessEnabled attribute. if (desiredResourceModel.getSessionStickinessEnabled() == null) { if (currentResourceModel.getSessionStickinessEnabled()) { logger.log("Updating LoadBalancer attribute: SessionStickinessEnabled"); proxyClient.injectCredentialsAndInvokeV2(UpdateLoadBalancerAttributeRequest.builder() .loadBalancerName(desiredResourceModel.getLoadBalancerName()).attributeName("SessionStickinessEnabled") .attributeValue("false").build(), proxyClient.client()::updateLoadBalancerAttribute); } } else { if (currentResourceModel.getSessionStickinessEnabled() != desiredResourceModel.getSessionStickinessEnabled()) { logger.log("Updating LoadBalancer attribute: SessionStickinessEnabled"); proxyClient.injectCredentialsAndInvokeV2(UpdateLoadBalancerAttributeRequest.builder() .loadBalancerName(desiredResourceModel.getLoadBalancerName()).attributeName("SessionStickinessEnabled") .attributeValue(String.valueOf(desiredResourceModel.getSessionStickinessEnabled())) .build(), proxyClient.client()::updateLoadBalancerAttribute); } } // Updating the SessionStickiness_LB_CookieDurationSeconds attribute. 
if (desiredResourceModel.getSessionStickinessLBCookieDurationSeconds() != null) { if (!desiredResourceModel.getSessionStickinessLBCookieDurationSeconds().equals(currentResourceModel.getSessionStickinessLBCookieDurationSeconds())) { logger.log("Updating LoadBalancer attribute: SessionStickiness_LB_CookieDurationSeconds"); proxyClient.injectCredentialsAndInvokeV2(UpdateLoadBalancerAttributeRequest.builder() .loadBalancerName(desiredResourceModel.getLoadBalancerName()).attributeName("SessionStickiness_LB_CookieDurationSeconds") .attributeValue(desiredResourceModel.getSessionStickinessLBCookieDurationSeconds()) .build(), proxyClient.client()::updateLoadBalancerAttribute); } } return awsResponse; } public AwsResponse detachInstances(AwsRequest request) { AwsResponse awsResponse = DetachInstancesFromLoadBalancerResponse.builder().build(); Set<String> desiredInstances = resourceModelRequest.getDesiredResourceState().getAttachedInstances(); Set<String> currentInstances = getCurrentResourceModelFromLightsail().getAttachedInstances(); Set<String> instancesToDetach = setDifference(currentInstances, desiredInstances); logger.log("Instances to detach: " + instancesToDetach.toString()); if (instancesToDetach.size() == 0) { return awsResponse; } val detachInstancesFromLoadBalancerRequest = DetachInstancesFromLoadBalancerRequest.builder() .loadBalancerName(resourceModel.getLoadBalancerName()).instanceNames(instancesToDetach).build(); awsResponse = proxyClient.injectCredentialsAndInvokeV2(detachInstancesFromLoadBalancerRequest, proxyClient.client()::detachInstancesFromLoadBalancer); return awsResponse; } public AwsResponse attachInstances(AwsRequest request) { AwsResponse awsResponse = AttachInstancesToLoadBalancerResponse.builder().build(); Set<String> desiredInstances = resourceModelRequest.getDesiredResourceState().getAttachedInstances(); Set<String> currentInstances = getCurrentResourceModelFromLightsail().getAttachedInstances(); Set<String> instancesToAttach = 
setDifference(desiredInstances, currentInstances); logger.log("Instances to attach: " + instancesToAttach.toString()); if (instancesToAttach.size() == 0) { return awsResponse; } val attachInstancesToLoadBalancerRequest = AttachInstancesToLoadBalancerRequest.builder() .loadBalancerName(resourceModel.getLoadBalancerName()).instanceNames(instancesToAttach).build(); awsResponse = proxyClient.injectCredentialsAndInvokeV2(attachInstancesToLoadBalancerRequest, proxyClient.client()::attachInstancesToLoadBalancer); return awsResponse; } @Override public AwsResponse create(AwsRequest request) { logger.log(String.format("Creating LoadBalancer: %s", resourceModel.getLoadBalancerName())); AwsResponse awsResponse; awsResponse = proxyClient.injectCredentialsAndInvokeV2(((CreateLoadBalancerRequest) request), proxyClient.client()::createLoadBalancer); logger.log(String.format("Successfully created LoadBalancer: %s", resourceModel.getLoadBalancerName())); return awsResponse; } @Override public AwsResponse delete(AwsRequest request) { logger.log(String.format("Deleting LoadBalancer: %s", resourceModel.getLoadBalancerName())); AwsResponse awsResponse = null; awsResponse = proxyClient.injectCredentialsAndInvokeV2(((DeleteLoadBalancerRequest) request), proxyClient.client()::deleteLoadBalancer); logger.log(String.format("Successfully deleted LoadBalancer: %s", resourceModel.getLoadBalancerName())); return awsResponse; } /** * Read LoadBalancer. 
* * @param request * * @return AwsResponse */ @Override public AwsResponse read(AwsRequest request) { val loadBalancerName = ((GetLoadBalancerRequest) request).loadBalancerName(); logger.log(String.format("Reading LoadBalancer: %s", loadBalancerName)); return proxyClient.injectCredentialsAndInvokeV2(GetLoadBalancerRequest.builder() .loadBalancerName(loadBalancerName).build(), proxyClient.client()::getLoadBalancer); } @Override public boolean isStabilizedDelete() { final boolean stabilized = false; logger.log(String.format("Checking if LoadBalancer: %s deletion has stabilized.", resourceModel.getLoadBalancerName(), stabilized)); try { this.read(GetLoadBalancerRequest.builder().loadBalancerName(resourceModel.getLoadBalancerName()).build()); } catch (final Exception e) { if (!isSafeExceptionDelete(e)) { throw e; } logger.log(String.format("LoadBalancer: %s deletion has stabilized", resourceModel.getLoadBalancerName())); return true; } return stabilized; } public boolean isStabilizedInstances() { val awsResponse = ((GetLoadBalancerResponse) this .read(GetLoadBalancerRequest.builder().loadBalancerName(resourceModel.getLoadBalancerName()).build())); for (InstanceHealthSummary instanceHealthSummary: awsResponse.loadBalancer().instanceHealthSummary()) { if (instanceHealthSummary.instanceHealth() == InstanceHealthState.INITIAL || instanceHealthSummary.instanceHealth() == InstanceHealthState.DRAINING) { return false; } } return true; } public ResourceModel getCurrentResourceModelFromLightsail() { return translateFromReadResponse(this.read(GetLoadBalancerRequest.builder() .loadBalancerName(resourceModel.getLoadBalancerName()).build())); } public Set<String> setDifference(Set<String> setOne, Set<String> setTwo) { if (setOne == null || setOne.size() == 0) { return new HashSet<>(); } if (setTwo == null || setTwo.size() == 0) { return setOne == null ? 
new HashSet<>() : setOne; } Set<String> result = new HashSet<String>(setOne); result.removeIf(setTwo::contains); return result; } /** * Get Current state of the LoadBalancer. * * @return * * @param awsResponse */ private String getCurrentState(GetLoadBalancerResponse awsResponse) { val loadBalancer = awsResponse.loadBalancer(); return loadBalancer.state() == null ? "Pending" : loadBalancer.state().name(); } @Override public boolean isStabilizedUpdate() { return false; } @Override public boolean isSafeExceptionCreateOrUpdate(Exception e) { return false; } @Override public boolean isSafeExceptionDelete(Exception e) { if (e instanceof CfnNotFoundException || e instanceof NotFoundException) { return true; // Its stabilized if the resource is gone.. } return false; } }
package com.ibm.swg.wmb.iseries.dataqueue;

import com.ibm.as400.access.AS400;
import com.ibm.as400.access.AS400Text;
import com.ibm.as400.access.BaseDataQueue;
import com.ibm.broker.plugin.MbElement;
import com.ibm.broker.plugin.MbException;
import com.ibm.broker.plugin.MbInputTerminal;
import com.ibm.broker.plugin.MbMessage;
import com.ibm.broker.plugin.MbMessageAssembly;
import com.ibm.broker.plugin.MbNodeInterface;
import com.ibm.broker.plugin.MbOutputTerminal;
import com.ibm.broker.plugin.MbUserException;
import com.ibm.swg.wmb.iseries.DataQueueFlowNode;
import com.ibm.swg.wmb.iseries.cache.ConfigurationData;
import com.ibm.swg.wmb.iseries.cache.ConnectionCache;
import com.ibm.swg.wmb.iseries.communication.ISeriesCall;

/**
 * DataQueueOutputNode.java, May 10, 2011
 *
 * Copyright (c) 2011 International Business Machines Corp. All rights reserved.
 *
 * This software is the confidential and proprietary information of the IBM
 * Corporation. ("Confidential Information"). You shall not disclose such
 * Confidential Information and shall use it only in accordance with the terms
 * of the license agreement you entered into with IBM.
 *
 * Message-broker output node that writes the incoming message body to an
 * iSeries (AS/400) data queue, converting it to the queue's CCSID first.
 *
 * @author Jochen_Benke@de.ibm.com
 *
 */
public class DataQueueOutputNode extends DataQueueFlowNode implements MbNodeInterface {

    /**
     * Declares the node's terminals: one input, plus "out" and "failure" outputs.
     *
     * @throws MbException if a terminal cannot be created
     */
    public DataQueueOutputNode() throws MbException {
        createInputTerminal("in");
        createOutputTerminal("out");
        createOutputTerminal("failure");
    }

    /* (non-Javadoc)
     * @see com.ibm.broker.plugin.MbNodeInterface#evaluate(com.ibm.broker.plugin.MbMessageAssembly, com.ibm.broker.plugin.MbInputTerminal)
     */
    @Override
    public void evaluate(MbMessageAssembly inAssembly, MbInputTerminal inTerm) throws MbException {
        MbOutputTerminal out = getOutputTerminal("out");

        MbMessage inMessage = inAssembly.getMessage();
        MbMessage outMessage = new MbMessage(inMessage);

        // connection to the AS400
        AS400 system = null;

        loadConfigurableService();

        try {
            // the local environment will overwrite the node properties
            getPropertiesFromLocalEnvironment(inAssembly);

            // setTrace directory
            if (enable == true) {
                ISeriesCall.enableTrace(buildFileName());
            }

            // connect to i series
            system = ConnectionCache.getConnection("UserDefined", getConfigurableService());

            //get CCSID
            ConfigurationData configurationData = ConnectionCache.getConfigurationData("UserDefined", getConfigurableService());
            String ccsid = configurationData.get_CCSID();

            // Message body is the last child of Root
            MbElement msgBody = inAssembly.getMessage().getRootElement().getLastChild();

            // send message to the data queue
            byte[] message = msgBody.toBitstream("", "", "", 0, 0, 0);

            // get data queue
            BaseDataQueue baseDataQueue = dataQueueFactory.getDataQueue(system, library, queueName, type);

            //translate to target CCSID
            // NOTE(review): the converter is sized from the raw byte length but
            // fed a String built with the platform default charset — confirm
            // this round-trip is correct for multi-byte CCSIDs.
            AS400Text textConverter = new AS400Text(message.length, Integer.parseInt(ccsid), system);
            String messageStr = new String(message);
            byte[] messageEBCDIC = textConverter.toBytes(messageStr);

            // write message to queue
            dataQueueFactory.writeMessage(baseDataQueue, key, messageEBCDIC, ccsid);

            // Build the output message assembly before propagating the message.
            MbMessageAssembly outAssembly = new MbMessageAssembly(inAssembly, inMessage);

            // The following should only be changed if not propagating message
            out.propagate(outAssembly);

        } catch (MbUserException e) {
            throw e;
        } finally {
            // turn off the tracing; always runs, even on failure
            ISeriesCall.disableTrace();
            // return the connection to the iSeries connection pool
            ConnectionCache.releaseConnection("UserDefined", getConfigurableService(), system);
            outMessage.clearMessage();
        }
    }

    /**
     * Override queue values, if local environment is propagated
     *
     * Each recognized child of the LocalEnvironment "DataQueue" element
     * overrides the corresponding node property for this invocation only.
     *
     * @param inAssembly the incoming message assembly carrying the local environment
     * @throws MbUserException if reading the local environment fails
     */
    private void getPropertiesFromLocalEnvironment(MbMessageAssembly inAssembly) throws MbUserException {
        try {
            // get local environment structure to override the default settings
            MbElement localEnvironment = inAssembly.getLocalEnvironment().getRootElement().getFirstElementByPath("DataQueue");

            if (localEnvironment != null) {
                // change the data queue name
                MbElement currentProperty = localEnvironment.getFirstElementByPath("queueName");
                if (currentProperty != null) {
                    queueName = (String) currentProperty.getValue();
                }
                // change the library name where the queue is placed
                currentProperty = localEnvironment.getFirstElementByPath("library");
                if (currentProperty != null) {
                    library = (String) currentProperty.getValue();
                }
                // change the type of the data queue
                currentProperty = localEnvironment.getFirstElementByPath("type");
                if (currentProperty != null) {
                    type = (String) currentProperty.getValue();
                }
                // change whether the data queue is keyed
                currentProperty = localEnvironment.getFirstElementByPath("keyed");
                if (currentProperty != null) {
                    String isKeyed = (String) currentProperty.getValue();
                    setKeyed(isKeyed);
                }
                // change the key used for a keyed data queue
                currentProperty = localEnvironment.getFirstElementByPath("key");
                if (currentProperty != null) {
                    key = (String) currentProperty.getValue();
                }
            }
        } catch (MbException e) {
            throw new MbUserException(e, "getPropertiesFromLocalEnvironment", "ISeriesProgramCall", e.getMessage(), "Get Local Configuration", null);
        }
    }

    /**
     * @return the broker-visible name of this node type
     */
    public static String getNodeName() {
        return "DataQueueOutputNode";
    }
}
package com.github.draylar.betterbees.honey; import com.github.draylar.betterbees.registry.BeeBlocks; import com.github.draylar.betterbees.registry.BeeFluids; import com.github.draylar.betterbees.registry.BeeItems; import net.minecraft.block.Block; import net.minecraft.block.BlockState; import net.minecraft.block.FluidBlock; import net.minecraft.block.entity.BlockEntity; import net.minecraft.fluid.BaseFluid; import net.minecraft.fluid.Fluid; import net.minecraft.fluid.FluidState; import net.minecraft.item.Item; import net.minecraft.particle.ParticleEffect; import net.minecraft.particle.ParticleTypes; import net.minecraft.state.StateManager; import net.minecraft.tag.FluidTags; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.Direction; import net.minecraft.world.BlockView; import net.minecraft.world.IWorld; import net.minecraft.world.WorldView; public abstract class HoneyFluid extends BaseFluid { @Override public Fluid getFlowing() { return BeeFluids.FLOWING_HONEY; } @Override public Fluid getStill() { return BeeFluids.HONEY; } @Override public Item getBucketItem() { return BeeItems.HONEY_BUCKET; } @Override protected boolean isInfinite() { return false; } @Override protected void beforeBreakingBlock(IWorld world, BlockPos pos, BlockState state) { BlockEntity blockEntity = state.getBlock().hasBlockEntity() ? 
world.getBlockEntity(pos) : null; Block.dropStacks(state, world.getWorld(), pos, blockEntity); } // flow speed; 4 is water, 2 is lava @Override protected int method_15733(WorldView worldView) { return 1; } @Override protected int getLevelDecreasePerBlock(WorldView worldView) { return 1; } @Override protected boolean method_15777(FluidState fluidState, BlockView blockView, BlockPos blockPos, Fluid fluid, Direction direction) { return direction == Direction.DOWN && !fluid.matches(FluidTags.WATER); } @Override public int getTickRate(WorldView worldView) { return 40; } @Override protected float getBlastResistance() { return 50; } @Override protected BlockState toBlockState(FluidState fluidState) { return BeeBlocks.HONEY_FLUID.getDefaultState().with(FluidBlock.LEVEL, method_15741(fluidState)); } @Override protected ParticleEffect getParticle() { return ParticleTypes.DRIPPING_HONEY; } @Override public boolean matchesType(Fluid fluid) { return fluid == BeeFluids.FLOWING_HONEY || fluid == BeeFluids.HONEY; } public static class Flowing extends HoneyFluid { @Override public boolean isStill(FluidState fluidState) { return false; } @Override public int getLevel(FluidState fluidState) { return fluidState.get(LEVEL); } @Override protected void appendProperties(StateManager.Builder<Fluid, FluidState> builder) { super.appendProperties(builder); builder.add(LEVEL); } } public static class Still extends HoneyFluid { @Override public boolean isStill(FluidState fluidState) { return true; } @Override public int getLevel(FluidState fluidState) { return 8; } } }
package com.ceiba.servicio.puerto.repositorio; import com.ceiba.servicio.modelo.entidad.Servicio; public interface RepositorioServicio { /** * Permite crear un servicio * @param servicio * @return el id generado */ Long crear(Servicio servicio); /** * Permite actualizar un servicio * @param servicio */ void actualizar(Servicio servicio); /** * Permite eliminar un servicio * @param id */ void eliminar(Long id); /** * Permite validar si existe un servicio con un Origen * @param origen * @return si existe o no */ boolean existe(String origen); /** * Permite validar si existe un servicio con un Origen excluyendo un id * @return si existe o no */ boolean existePorId(Long id); }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.core.management.impl; import javax.json.JsonArray; import javax.json.JsonArrayBuilder; import javax.json.JsonObject; import javax.json.JsonObjectBuilder; import javax.management.ListenerNotFoundException; import javax.management.MBeanAttributeInfo; import javax.management.MBeanNotificationInfo; import javax.management.MBeanOperationInfo; import javax.management.Notification; import javax.management.NotificationBroadcasterSupport; import javax.management.NotificationEmitter; import javax.management.NotificationFilter; import javax.management.NotificationListener; import javax.transaction.xa.Xid; import java.text.DateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import org.apache.activemq.artemis.api.config.ActiveMQDefaultConfiguration; import 
org.apache.activemq.artemis.api.core.ActiveMQAddressDoesNotExistException; import org.apache.activemq.artemis.api.core.ActiveMQException; import org.apache.activemq.artemis.api.core.JsonUtil; import org.apache.activemq.artemis.api.core.RoutingType; import org.apache.activemq.artemis.api.core.SimpleString; import org.apache.activemq.artemis.api.core.TransportConfiguration; import org.apache.activemq.artemis.api.core.management.ActiveMQServerControl; import org.apache.activemq.artemis.api.core.management.AddressControl; import org.apache.activemq.artemis.api.core.management.BridgeControl; import org.apache.activemq.artemis.api.core.management.CoreNotificationType; import org.apache.activemq.artemis.api.core.management.DivertControl; import org.apache.activemq.artemis.api.core.management.Parameter; import org.apache.activemq.artemis.api.core.management.QueueControl; import org.apache.activemq.artemis.core.client.impl.Topology; import org.apache.activemq.artemis.core.client.impl.TopologyMemberImpl; import org.apache.activemq.artemis.core.config.BridgeConfiguration; import org.apache.activemq.artemis.core.config.Configuration; import org.apache.activemq.artemis.core.config.ConnectorServiceConfiguration; import org.apache.activemq.artemis.core.config.DivertConfiguration; import org.apache.activemq.artemis.core.config.TransformerConfiguration; import org.apache.activemq.artemis.core.filter.Filter; import org.apache.activemq.artemis.core.management.impl.view.AddressView; import org.apache.activemq.artemis.core.management.impl.view.ConnectionView; import org.apache.activemq.artemis.core.management.impl.view.ConsumerView; import org.apache.activemq.artemis.core.management.impl.view.ProducerView; import org.apache.activemq.artemis.core.management.impl.view.QueueView; import org.apache.activemq.artemis.core.management.impl.view.SessionView; import org.apache.activemq.artemis.core.messagecounter.MessageCounterManager; import 
org.apache.activemq.artemis.core.messagecounter.impl.MessageCounterManagerImpl; import org.apache.activemq.artemis.core.persistence.StorageManager; import org.apache.activemq.artemis.core.persistence.config.PersistedAddressSetting; import org.apache.activemq.artemis.core.persistence.config.PersistedRoles; import org.apache.activemq.artemis.core.postoffice.Binding; import org.apache.activemq.artemis.core.postoffice.Bindings; import org.apache.activemq.artemis.core.postoffice.DuplicateIDCache; import org.apache.activemq.artemis.core.postoffice.PostOffice; import org.apache.activemq.artemis.core.postoffice.impl.LocalQueueBinding; import org.apache.activemq.artemis.core.remoting.server.RemotingService; import org.apache.activemq.artemis.core.security.CheckType; import org.apache.activemq.artemis.core.security.Role; import org.apache.activemq.artemis.core.server.ActiveMQMessageBundle; import org.apache.activemq.artemis.core.server.ActiveMQServer; import org.apache.activemq.artemis.core.server.ActiveMQServerLogger; import org.apache.activemq.artemis.core.server.ConnectorServiceFactory; import org.apache.activemq.artemis.core.server.Consumer; import org.apache.activemq.artemis.core.server.DivertConfigurationRoutingType; import org.apache.activemq.artemis.core.server.JournalType; import org.apache.activemq.artemis.core.server.Queue; import org.apache.activemq.artemis.core.server.ServerConsumer; import org.apache.activemq.artemis.core.server.ServerProducer; import org.apache.activemq.artemis.core.server.ServerSession; import org.apache.activemq.artemis.core.server.cluster.ClusterConnection; import org.apache.activemq.artemis.core.server.cluster.ClusterManager; import org.apache.activemq.artemis.core.server.cluster.ha.HAPolicy; import org.apache.activemq.artemis.core.server.cluster.ha.LiveOnlyPolicy; import org.apache.activemq.artemis.core.server.cluster.ha.ScaleDownPolicy; import org.apache.activemq.artemis.core.server.cluster.ha.SharedStoreSlavePolicy; import 
org.apache.activemq.artemis.core.server.group.GroupingHandler;
import org.apache.activemq.artemis.core.server.impl.Activation;
import org.apache.activemq.artemis.core.server.impl.AddressInfo;
import org.apache.activemq.artemis.core.server.impl.SharedNothingLiveActivation;
import org.apache.activemq.artemis.core.settings.impl.AddressFullMessagePolicy;
import org.apache.activemq.artemis.core.settings.impl.AddressSettings;
import org.apache.activemq.artemis.core.settings.impl.SlowConsumerPolicy;
import org.apache.activemq.artemis.core.transaction.ResourceManager;
import org.apache.activemq.artemis.core.transaction.Transaction;
import org.apache.activemq.artemis.core.transaction.TransactionDetail;
import org.apache.activemq.artemis.core.transaction.TransactionDetailFactory;
import org.apache.activemq.artemis.core.transaction.impl.CoreTransactionDetail;
import org.apache.activemq.artemis.core.transaction.impl.XidImpl;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.utils.JsonLoader;
import org.apache.activemq.artemis.utils.ListUtil;
import org.apache.activemq.artemis.utils.SecurityFormatter;
import org.apache.activemq.artemis.utils.collections.TypedProperties;
import org.jboss.logging.Logger;

/**
 * JMX control implementation for the broker. Most operations follow the same
 * pattern: checkStarted() guards against a stopped server, clearIO()/blockOnIO()
 * (inherited from AbstractControl) bracket the management work so it does not
 * interfere with journal I/O accounting.
 */
public class ActiveMQServerControlImpl extends AbstractControl implements ActiveMQServerControl, NotificationEmitter, org.apache.activemq.artemis.core.server.management.NotificationListener {

   // Constants -----------------------------------------------------

   private static final Logger logger = Logger.getLogger(ActiveMQServerControlImpl.class);

   // Attributes ----------------------------------------------------

   private final PostOffice postOffice;

   private final Configuration configuration;

   private final ResourceManager resourceManager;

   private final RemotingService remotingService;

   private final ActiveMQServer server;

   private final MessageCounterManager messageCounterManager;

   // JMX notification fan-out; notifSeq numbers the emitted notifications.
   private final NotificationBroadcasterSupport broadcaster;

   private final AtomicLong notifSeq = new AtomicLong(0);

   // Static --------------------------------------------------------

   // Constructors --------------------------------------------------

   /**
    * Wires the control to the server's collaborators and registers itself as a
    * listener on the management service so core notifications can be re-emitted
    * as JMX notifications.
    */
   public ActiveMQServerControlImpl(final PostOffice postOffice,
                                    final Configuration configuration,
                                    final ResourceManager resourceManager,
                                    final RemotingService remotingService,
                                    final ActiveMQServer messagingServer,
                                    final MessageCounterManager messageCounterManager,
                                    final StorageManager storageManager,
                                    final NotificationBroadcasterSupport broadcaster) throws Exception {
      super(ActiveMQServerControl.class, storageManager);
      this.postOffice = postOffice;
      this.configuration = configuration;
      this.resourceManager = resourceManager;
      this.remotingService = remotingService;
      server = messagingServer;
      this.messageCounterManager = messageCounterManager;
      this.broadcaster = broadcaster;
      server.getManagementService().addNotificationListener(this);
   }

   // ActiveMQServerControlMBean implementation --------------------

   // Deliberately no checkStarted(): this attribute must be readable while stopped.
   @Override
   public boolean isStarted() {
      clearIO();
      try {
         return server.isStarted();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String getVersion() {
      checkStarted();
      clearIO();
      try {
         return server.getVersion().getFullVersion();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isBackup() {
      checkStarted();
      clearIO();
      try {
         return server.getHAPolicy().isBackup();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isSharedStore() {
      checkStarted();
      clearIO();
      try {
         return server.getHAPolicy().isSharedStore();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String getBindingsDirectory() {
      checkStarted();
      clearIO();
      try {
         return configuration.getBindingsDirectory();
      } finally {
         blockOnIO();
      }
   }

   // NOTE(review): duplicates getIncomingInterceptorClassNames() and has no
   // @Override — presumably a legacy alias kept for backward compatibility; confirm.
   public String[] getInterceptorClassNames() {
      checkStarted();
      clearIO();
      try {
         return configuration.getIncomingInterceptorClassNames().toArray(new String[configuration.getIncomingInterceptorClassNames().size()]);
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String[] getIncomingInterceptorClassNames() {
      checkStarted();
      clearIO();
      try {
         return configuration.getIncomingInterceptorClassNames().toArray(new String[configuration.getIncomingInterceptorClassNames().size()]);
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String[] getOutgoingInterceptorClassNames() {
      checkStarted();
      clearIO();
      try {
         return configuration.getOutgoingInterceptorClassNames().toArray(new String[configuration.getOutgoingInterceptorClassNames().size()]);
      } finally {
         blockOnIO();
      }
   }

   // Journal buffer settings differ per journal implementation (AIO vs NIO).
   @Override
   public int getJournalBufferSize() {
      checkStarted();
      clearIO();
      try {
         return configuration.getJournalType() == JournalType.ASYNCIO ? configuration.getJournalBufferSize_AIO() : configuration.getJournalBufferSize_NIO();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getJournalBufferTimeout() {
      checkStarted();
      clearIO();
      try {
         return configuration.getJournalType() == JournalType.ASYNCIO ? configuration.getJournalBufferTimeout_AIO() : configuration.getJournalBufferTimeout_NIO();
      } finally {
         blockOnIO();
      }
   }

   // Only meaningful for shared-store slaves; silently ignored for other HA policies.
   @Override
   public void setFailoverOnServerShutdown(boolean failoverOnServerShutdown) {
      checkStarted();
      clearIO();
      try {
         HAPolicy haPolicy = server.getHAPolicy();
         if (haPolicy instanceof SharedStoreSlavePolicy) {
            ((SharedStoreSlavePolicy) haPolicy).setFailoverOnServerShutdown(failoverOnServerShutdown);
         }
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isFailoverOnServerShutdown() {
      checkStarted();
      clearIO();
      try {
         HAPolicy haPolicy = server.getHAPolicy();
         if (haPolicy instanceof SharedStoreSlavePolicy) {
            return ((SharedStoreSlavePolicy) haPolicy).isFailoverOnServerShutdown();
         } else {
            return false;
         }
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getJournalMaxIO() {
      checkStarted();
      clearIO();
      try {
         return configuration.getJournalType() == JournalType.ASYNCIO ? configuration.getJournalMaxIO_AIO() : configuration.getJournalMaxIO_NIO();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String getJournalDirectory() {
      checkStarted();
      clearIO();
      try {
         return configuration.getJournalDirectory();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getJournalFileSize() {
      checkStarted();
      clearIO();
      try {
         return configuration.getJournalFileSize();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getJournalMinFiles() {
      checkStarted();
      clearIO();
      try {
         return configuration.getJournalMinFiles();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getJournalCompactMinFiles() {
      checkStarted();
      clearIO();
      try {
         return configuration.getJournalCompactMinFiles();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getJournalCompactPercentage() {
      checkStarted();
      clearIO();
      try {
         return configuration.getJournalCompactPercentage();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isPersistenceEnabled() {
      checkStarted();
      clearIO();
      try {
         return configuration.isPersistenceEnabled();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String getJournalType() {
      checkStarted();
      clearIO();
      try {
         return configuration.getJournalType().toString();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String getPagingDirectory() {
      checkStarted();
      clearIO();
      try {
         return configuration.getPagingDirectory();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getScheduledThreadPoolMaxSize() {
      checkStarted();
      clearIO();
      try {
         return configuration.getScheduledThreadPoolMaxSize();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getThreadPoolMaxSize() {
      checkStarted();
      clearIO();
      try {
         return configuration.getThreadPoolMaxSize();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public long getSecurityInvalidationInterval() {
      checkStarted();
      clearIO();
      try {
         return configuration.getSecurityInvalidationInterval();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isClustered() {
      checkStarted();
      clearIO();
      try {
         return configuration.isClustered();
      } finally {
         blockOnIO();
      }
   }

   @Override
public boolean isCreateBindingsDir() {
      checkStarted();
      clearIO();
      try {
         return configuration.isCreateBindingsDir();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isCreateJournalDir() {
      checkStarted();
      clearIO();
      try {
         return configuration.isCreateJournalDir();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isJournalSyncNonTransactional() {
      checkStarted();
      clearIO();
      try {
         return configuration.isJournalSyncNonTransactional();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isJournalSyncTransactional() {
      checkStarted();
      clearIO();
      try {
         return configuration.isJournalSyncTransactional();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isSecurityEnabled() {
      checkStarted();
      clearIO();
      try {
         return configuration.isSecurityEnabled();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isAsyncConnectionExecutionEnabled() {
      checkStarted();
      clearIO();
      try {
         return configuration.isAsyncConnectionExecutionEnabled();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getDiskScanPeriod() {
      checkStarted();
      clearIO();
      try {
         return configuration.getDiskScanPeriod();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getMaxDiskUsage() {
      checkStarted();
      clearIO();
      try {
         return configuration.getMaxDiskUsage();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public long getGlobalMaxSize() {
      checkStarted();
      clearIO();
      try {
         return configuration.getGlobalMaxSize();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public long getAddressMemoryUsage() {
      checkStarted();
      clearIO();
      try {
         //this should not happen but if it does, return -1 to highlight it is not working
         if (server.getPagingManager() == null) {
            return -1L;
         }
         return server.getPagingManager().getGlobalSize();
      } finally {
         blockOnIO();
      }
   }

   /**
    * Percentage of the configured global max size currently in use.
    * Returns 0 when no max is configured or when usage cannot be determined.
    */
   @Override
   public int getAddressMemoryUsagePercentage() {
      long globalMaxSize = getGlobalMaxSize();
      // no max size set implies 0% used
      if (globalMaxSize <= 0) {
         return 0;
      }
      long memoryUsed = getAddressMemoryUsage();
      if (memoryUsed <= 0) {
         return 0;
      }
      double result = (100D * memoryUsed) / globalMaxSize;
      return (int) result;
   }

   /**
    * Freezes replication on a shared-nothing live server.
    * @return true if replication was frozen, false when the HA policy does not replicate
    */
   @Override
   public boolean freezeReplication() {
      Activation activation = server.getActivation();
      if (activation instanceof SharedNothingLiveActivation) {
         SharedNothingLiveActivation liveActivation = (SharedNothingLiveActivation) activation;
         liveActivation.freezeReplication();
         return true;
      }
      return false;
   }

   // Renders an AddressInfo as the human-readable string returned by create/updateAddress.
   private enum AddressInfoTextFormatter {
      Long {
         @Override
         public StringBuilder format(AddressInfo addressInfo, StringBuilder output) {
            output.append("Address [name=").append(addressInfo.getName());
            output.append(", routingTypes={");
            final EnumSet<RoutingType> routingTypes = addressInfo.getRoutingTypes();
            if (!routingTypes.isEmpty()) {
               for (RoutingType routingType : routingTypes) {
                  output.append(routingType).append(',');
               }
               // delete hanging comma
               output.deleteCharAt(output.length() - 1);
            }
            output.append('}');
            output.append(", autoCreated=").append(addressInfo.isAutoCreated());
            output.append(']');
            return output;
         }
      };

      public abstract StringBuilder format(AddressInfo addressInfo, StringBuilder output);
   }

   // Renders a Queue as the human-readable string returned by create/updateQueue.
   public enum QueueTextFormatter {
      Long {
         @Override
         StringBuilder format(Queue queue, StringBuilder output) {
            output.append("Queue [name=").append(queue.getName());
            output.append(", address=").append(queue.getAddress());
            output.append(", routingType=").append(queue.getRoutingType());
            final Filter filter = queue.getFilter();
            if (filter != null) {
               output.append(", filter=").append(filter.getFilterString());
            }
            output.append(", durable=").append(queue.isDurable());
            final int maxConsumers = queue.getMaxConsumers();
            if (maxConsumers != Queue.MAX_CONSUMERS_UNLIMITED) {
               output.append(", maxConsumers=").append(queue.getMaxConsumers());
            }
            output.append(", purgeOnNoConsumers=").append(queue.isPurgeOnNoConsumers());
            output.append(", autoCreateAddress=").append(queue.isAutoCreated());
            output.append(']');
            return output;
         }
      };

      abstract StringBuilder format(Queue queue, StringBuilder output);
   }

   @Override
   public String createAddress(String name, String
routingTypes) throws Exception {
      checkStarted();
      clearIO();
      try {
         // routingTypes is a comma-separated list of RoutingType names
         EnumSet<RoutingType> set = EnumSet.noneOf(RoutingType.class);
         for (String routingType : ListUtil.toList(routingTypes)) {
            set.add(RoutingType.valueOf(routingType));
         }
         final AddressInfo addressInfo = new AddressInfo(new SimpleString(name), set);
         if (server.addAddressInfo(addressInfo)) {
            return AddressInfoTextFormatter.Long.format(addressInfo, new StringBuilder()).toString();
         } else {
            throw ActiveMQMessageBundle.BUNDLE.addressAlreadyExists(addressInfo.getName());
         }
      } finally {
         blockOnIO();
      }
   }

   /**
    * Updates an address's routing types. A null routingTypes leaves the
    * routing-type set unchanged on the server side.
    */
   @Override
   public String updateAddress(String name, String routingTypes) throws Exception {
      checkStarted();
      clearIO();
      try {
         final EnumSet<RoutingType> routingTypeSet;
         if (routingTypes == null) {
            routingTypeSet = null;
         } else {
            routingTypeSet = EnumSet.noneOf(RoutingType.class);
            final String[] routingTypeNames = routingTypes.split(",");
            for (String routingTypeName : routingTypeNames) {
               routingTypeSet.add(RoutingType.valueOf(routingTypeName));
            }
         }
         if (!server.updateAddressInfo(SimpleString.toSimpleString(name), routingTypeSet)) {
            throw ActiveMQMessageBundle.BUNDLE.addressDoesNotExist(SimpleString.toSimpleString(name));
         }
         return AddressInfoTextFormatter.Long.format(server.getAddressInfo(SimpleString.toSimpleString(name)), new StringBuilder()).toString();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public void deleteAddress(String name) throws Exception {
      deleteAddress(name, false);
   }

   @Override
   public void deleteAddress(String name, boolean force) throws Exception {
      checkStarted();
      clearIO();
      try {
         server.removeAddressInfo(new SimpleString(name), null, force);
      } catch (ActiveMQException e) {
         // NOTE(review): wrapping with only e.getMessage() drops the cause/stack trace
         throw new IllegalStateException(e.getMessage());
      } finally {
         blockOnIO();
      }
   }

   @Deprecated
   @Override
   public void deployQueue(final String address, final String name, final String filterString) throws Exception {
      deployQueue(address, name, filterString, true);
   }

   @Deprecated
   @Override
   public void deployQueue(final String address, final String name, final String filterStr, final boolean durable) throws Exception {
      checkStarted();
      SimpleString filter = filterStr == null ? null : new SimpleString(filterStr);
      clearIO();
      try {
         server.createQueue(SimpleString.toSimpleString(address), server.getAddressSettingsRepository().getMatch(address).getDefaultQueueRoutingType(), new SimpleString(name), filter, durable, false);
      } finally {
         blockOnIO();
      }
   }

   // The createQueue overloads below all funnel into the full 12-argument variant,
   // filling defaults from the matching address settings.

   @Override
   public void createQueue(final String address, final String name) throws Exception {
      createQueue(address, name, true);
   }

   @Override
   public void createQueue(final String address, final String name, final String routingType) throws Exception {
      createQueue(address, name, true, routingType);
   }

   @Override
   public void createQueue(final String address, final String name, final boolean durable) throws Exception {
      createQueue(address, name, null, durable);
   }

   @Override
   public void createQueue(final String address, final String name, final boolean durable, final String routingType) throws Exception {
      createQueue(address, name, null, durable, routingType);
   }

   @Override
   public void createQueue(final String address, final String name, final String filterStr, final boolean durable) throws Exception {
      // When no address is given, the queue name doubles as the settings match key.
      createQueue(address, name, filterStr, durable, server.getAddressSettingsRepository().getMatch(address == null ? name : address).getDefaultQueueRoutingType().toString());
   }

   @Override
   public void createQueue(final String address, final String name, final String filterStr, final boolean durable, final String routingType) throws Exception {
      AddressSettings addressSettings = server.getAddressSettingsRepository().getMatch(address == null ?
name : address);
      createQueue(address, routingType, name, filterStr, durable, addressSettings.getDefaultMaxConsumers(), addressSettings.isDefaultPurgeOnNoConsumers(), addressSettings.isAutoCreateAddresses());
   }

   @Override
   public String createQueue(String address,
                             String routingType,
                             String name,
                             String filterStr,
                             boolean durable,
                             int maxConsumers,
                             boolean purgeOnNoConsumers,
                             boolean autoCreateAddress) throws Exception {
      // Fill exclusive/last-value/dispatch defaults from the matching address settings.
      AddressSettings addressSettings = server.getAddressSettingsRepository().getMatch(address == null ? name : address);
      return createQueue(address, routingType, name, filterStr, durable, maxConsumers, purgeOnNoConsumers, addressSettings.isDefaultExclusiveQueue(), addressSettings.isDefaultLastValueQueue(), addressSettings.getDefaultConsumersBeforeDispatch(), addressSettings.getDefaultDelayBeforeDispatch(), autoCreateAddress);
   }

   /**
    * Full queue-creation entry point; returns the created queue rendered by
    * {@link QueueTextFormatter}.
    */
   @Override
   public String createQueue(String address,
                             String routingType,
                             String name,
                             String filterStr,
                             boolean durable,
                             int maxConsumers,
                             boolean purgeOnNoConsumers,
                             boolean exclusive,
                             boolean lastValue,
                             int consumersBeforeDispatch,
                             long delayBeforeDispatch,
                             boolean autoCreateAddress) throws Exception {
      checkStarted();
      clearIO();
      SimpleString filter = filterStr == null ? null : new SimpleString(filterStr);
      try {
         // NOTE(review): this re-assignment is redundant — filter was already built
         // above for any non-null filterStr; a blank filterStr still yields
         // SimpleString("") rather than null. Confirm whether blank should mean "no filter".
         if (filterStr != null && !filterStr.trim().equals("")) {
            filter = new SimpleString(filterStr);
         }

         final Queue queue = server.createQueue(SimpleString.toSimpleString(address), RoutingType.valueOf(routingType.toUpperCase()), new SimpleString(name), filter, durable, false, maxConsumers, purgeOnNoConsumers, exclusive, lastValue, consumersBeforeDispatch, delayBeforeDispatch, autoCreateAddress);
         return QueueTextFormatter.Long.format(queue, new StringBuilder()).toString();
      } catch (ActiveMQException e) {
         // NOTE(review): wrapping with only e.getMessage() drops the cause/stack trace
         throw new IllegalStateException(e.getMessage());
      } finally {
         blockOnIO();
      }
   }

   @Deprecated
   @Override
   public String updateQueue(String name,
                             String routingType,
                             Integer maxConsumers,
                             Boolean purgeOnNoConsumers) throws Exception {
      return updateQueue(name, routingType, maxConsumers, purgeOnNoConsumers, null);
   }

   @Deprecated
   @Override
   public String updateQueue(String name,
                             String routingType,
                             Integer maxConsumers,
                             Boolean purgeOnNoConsumers,
                             Boolean exclusive) throws Exception {
      return updateQueue(name, routingType, maxConsumers, purgeOnNoConsumers, exclusive, null);
   }

   @Override
   public String updateQueue(String name,
                             String routingType,
                             Integer maxConsumers,
                             Boolean purgeOnNoConsumers,
                             Boolean exclusive,
                             String user) throws Exception {
      return updateQueue(name, routingType, maxConsumers, purgeOnNoConsumers, exclusive, null, null, user);
   }

   /**
    * Full queue-update entry point; null arguments leave the corresponding
    * attribute unchanged.
    */
   @Override
   public String updateQueue(String name,
                             String routingType,
                             Integer maxConsumers,
                             Boolean purgeOnNoConsumers,
                             Boolean exclusive,
                             Integer consumersBeforeDispatch,
                             Long delayBeforeDispatch,
                             String user) throws Exception {
      checkStarted();
      clearIO();
      try {
         final Queue queue = server.updateQueue(name, routingType != null ?
RoutingType.valueOf(routingType) : null, maxConsumers, purgeOnNoConsumers, exclusive, consumersBeforeDispatch, delayBeforeDispatch, user);
         if (queue == null) {
            throw ActiveMQMessageBundle.BUNDLE.noSuchQueue(new SimpleString(name));
         }
         return QueueTextFormatter.Long.format(queue, new StringBuilder()).toString();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String[] getQueueNames() {
      return getQueueNames(null);
   }

   /**
    * Names of all deployed queues, optionally filtered by routing type
    * (case-insensitive; null means no filtering).
    */
   @Override
   public String[] getQueueNames(String routingType) {
      checkStarted();
      clearIO();
      try {
         Object[] queueControls = server.getManagementService().getResources(QueueControl.class);
         List<String> names = new ArrayList<>();
         for (int i = 0; i < queueControls.length; i++) {
            QueueControl queueControl = (QueueControl) queueControls[i];
            if (routingType != null && queueControl.getRoutingType().equals(routingType.toUpperCase())) {
               names.add(queueControl.getName());
            } else if (routingType == null) {
               names.add(queueControl.getName());
            }
         }
         String[] result = new String[names.size()];
         return names.toArray(result);
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String getUptime() {
      checkStarted();
      clearIO();
      try {
         return server.getUptime();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public long getUptimeMillis() {
      checkStarted();
      clearIO();
      try {
         return server.getUptimeMillis();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public boolean isReplicaSync() {
      checkStarted();
      clearIO();
      try {
         return server.isReplicaSync();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public String[] getAddressNames() {
      checkStarted();
      clearIO();
      try {
         Object[] addresses = server.getManagementService().getResources(AddressControl.class);
         String[] names = new String[addresses.length];
         for (int i = 0; i < addresses.length; i++) {
            AddressControl address = (AddressControl) addresses[i];
            names[i] = address.getAddress();
         }
         return names;
      } finally {
         blockOnIO();
      }
   }

   /**
    * Destroys a queue. removeConsumers also controls the "autoDeleteAddress"
    * style semantics: when false, destruction fails if consumers are attached.
    */
   @Override
   public void destroyQueue(final String name, final boolean removeConsumers, final boolean autoDeleteAddress) throws Exception {
      checkStarted();
      clearIO();
      try {
         SimpleString queueName = new SimpleString(name);
         server.destroyQueue(queueName, null, !removeConsumers, removeConsumers, autoDeleteAddress);
      } finally {
         blockOnIO();
      }
   }

   @Override
   public void destroyQueue(final String name, final boolean removeConsumers) throws Exception {
      destroyQueue(name, removeConsumers, false);
   }

   @Override
   public void destroyQueue(final String name) throws Exception {
      destroyQueue(name, false);
   }

   @Override
   public String getAddressInfo(String address) throws ActiveMQAddressDoesNotExistException {
      checkStarted();
      clearIO();
      try {
         final AddressInfo addressInfo = server.getAddressInfo(SimpleString.toSimpleString(address));
         if (addressInfo == null) {
            throw ActiveMQMessageBundle.BUNDLE.addressDoesNotExist(SimpleString.toSimpleString(address));
         } else {
            return AddressInfoTextFormatter.Long.format(addressInfo, new StringBuilder()).toString();
         }
      } finally {
         blockOnIO();
      }
   }

   // Comma-separated management strings of all bindings on the address.
   @Override
   public String listBindingsForAddress(String address) throws Exception {
      checkStarted();
      clearIO();
      try {
         final Bindings bindings = server.getPostOffice().getBindingsForAddress(new SimpleString(address));
         return bindings.getBindings().stream().map(Binding::toManagementString).collect(Collectors.joining(","));
      } finally {
         blockOnIO();
      }
   }

   // All addresses, sorted case-insensitively and joined with the given separator.
   @Override
   public String listAddresses(String separator) throws Exception {
      checkStarted();
      clearIO();
      try {
         final Set<SimpleString> addresses = server.getPostOffice().getAddresses();
         TreeSet<SimpleString> sortAddress = new TreeSet<>(new Comparator<SimpleString>() {
            @Override
            public int compare(SimpleString o1, SimpleString o2) {
               return o1.toString().compareToIgnoreCase(o2.toString());
            }
         });
         sortAddress.addAll(addresses);
         StringBuilder result = new StringBuilder();
         for (SimpleString string : sortAddress) {
            if (result.length() > 0) {
               result.append(separator);
            }
            result.append(string);
         }
         return result.toString();
      } finally {
         blockOnIO();
      }
   }

   @Override
   public int getConnectionCount() {
      checkStarted();
      clearIO();
      try {
         return
server.getConnectionCount(); } finally { blockOnIO(); } } @Override public long getTotalConnectionCount() { checkStarted(); clearIO(); try { return server.getTotalConnectionCount(); } finally { blockOnIO(); } } @Override public long getTotalMessageCount() { checkStarted(); clearIO(); try { return server.getTotalMessageCount(); } finally { blockOnIO(); } } @Override public long getTotalMessagesAdded() { checkStarted(); clearIO(); try { return server.getTotalMessagesAdded(); } finally { blockOnIO(); } } @Override public long getTotalMessagesAcknowledged() { checkStarted(); clearIO(); try { return server.getTotalMessagesAcknowledged(); } finally { blockOnIO(); } } @Override public long getTotalConsumerCount() { checkStarted(); clearIO(); try { return server.getTotalConsumerCount(); } finally { blockOnIO(); } } @Override public void enableMessageCounters() { checkStarted(); clearIO(); try { setMessageCounterEnabled(true); } finally { blockOnIO(); } } @Override public void disableMessageCounters() { checkStarted(); clearIO(); try { setMessageCounterEnabled(false); } finally { blockOnIO(); } } @Override public void resetAllMessageCounters() { checkStarted(); clearIO(); try { messageCounterManager.resetAllCounters(); } finally { blockOnIO(); } } @Override public void resetAllMessageCounterHistories() { checkStarted(); clearIO(); try { messageCounterManager.resetAllCounterHistories(); } finally { blockOnIO(); } } @Override public boolean isMessageCounterEnabled() { checkStarted(); clearIO(); try { return configuration.isMessageCounterEnabled(); } finally { blockOnIO(); } } @Override public synchronized long getMessageCounterSamplePeriod() { checkStarted(); clearIO(); try { return messageCounterManager.getSamplePeriod(); } finally { blockOnIO(); } } @Override public synchronized void setMessageCounterSamplePeriod(final long newPeriod) { checkStarted(); checkStarted(); clearIO(); try { if (newPeriod < MessageCounterManagerImpl.MIN_SAMPLE_PERIOD) { if (newPeriod <= 0) { throw 
ActiveMQMessageBundle.BUNDLE.periodMustGreaterThanZero(newPeriod); } ActiveMQServerLogger.LOGGER.invalidMessageCounterPeriod(newPeriod); } if (messageCounterManager != null && newPeriod != messageCounterManager.getSamplePeriod()) { messageCounterManager.reschedule(newPeriod); } } finally { blockOnIO(); } } @Override public int getMessageCounterMaxDayCount() { checkStarted(); clearIO(); try { return messageCounterManager.getMaxDayCount(); } finally { blockOnIO(); } } @Override public void setMessageCounterMaxDayCount(final int count) { checkStarted(); clearIO(); try { if (count <= 0) { throw ActiveMQMessageBundle.BUNDLE.greaterThanZero(count); } messageCounterManager.setMaxDayCount(count); } finally { blockOnIO(); } } @Override public String[] listPreparedTransactions() { checkStarted(); clearIO(); try { DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.MEDIUM); Map<Xid, Long> xids = resourceManager.getPreparedTransactionsWithCreationTime(); ArrayList<Entry<Xid, Long>> xidsSortedByCreationTime = new ArrayList<>(xids.entrySet()); Collections.sort(xidsSortedByCreationTime, new Comparator<Entry<Xid, Long>>() { @Override public int compare(final Entry<Xid, Long> entry1, final Entry<Xid, Long> entry2) { // sort by creation time, oldest first return (int) (entry1.getValue() - entry2.getValue()); } }); String[] s = new String[xidsSortedByCreationTime.size()]; int i = 0; for (Map.Entry<Xid, Long> entry : xidsSortedByCreationTime) { Date creation = new Date(entry.getValue()); Xid xid = entry.getKey(); s[i++] = dateFormat.format(creation) + " base64: " + XidImpl.toBase64String(xid) + " " + xid.toString(); } return s; } finally { blockOnIO(); } } @Override public String listPreparedTransactionDetailsAsJSON() throws Exception { return listPreparedTransactionDetailsAsJSON((xid, tx, creation) -> new CoreTransactionDetail(xid, tx, creation)); } public String listPreparedTransactionDetailsAsJSON(TransactionDetailFactory factory) throws Exception { 
checkStarted(); clearIO(); try { Map<Xid, Long> xids = resourceManager.getPreparedTransactionsWithCreationTime(); if (xids == null || xids.size() == 0) { return ""; } ArrayList<Entry<Xid, Long>> xidsSortedByCreationTime = new ArrayList<>(xids.entrySet()); Collections.sort(xidsSortedByCreationTime, new Comparator<Entry<Xid, Long>>() { @Override public int compare(final Entry<Xid, Long> entry1, final Entry<Xid, Long> entry2) { // sort by creation time, oldest first return (int) (entry1.getValue() - entry2.getValue()); } }); JsonArrayBuilder txDetailListJson = JsonLoader.createArrayBuilder(); for (Map.Entry<Xid, Long> entry : xidsSortedByCreationTime) { Xid xid = entry.getKey(); Transaction tx = resourceManager.getTransaction(xid); if (tx == null) { continue; } TransactionDetail detail = factory.createTransactionDetail(xid, tx, entry.getValue()); txDetailListJson.add(detail.toJSON()); } return txDetailListJson.build().toString(); } finally { blockOnIO(); } } @Override public String listPreparedTransactionDetailsAsHTML() throws Exception { return listPreparedTransactionDetailsAsHTML((xid, tx, creation) -> new CoreTransactionDetail(xid, tx, creation)); } public String listPreparedTransactionDetailsAsHTML(TransactionDetailFactory factory) throws Exception { checkStarted(); clearIO(); try { Map<Xid, Long> xids = resourceManager.getPreparedTransactionsWithCreationTime(); if (xids == null || xids.size() == 0) { return "<h3>*** Prepared Transaction Details ***</h3><p>No entry.</p>"; } ArrayList<Entry<Xid, Long>> xidsSortedByCreationTime = new ArrayList<>(xids.entrySet()); Collections.sort(xidsSortedByCreationTime, new Comparator<Entry<Xid, Long>>() { @Override public int compare(final Entry<Xid, Long> entry1, final Entry<Xid, Long> entry2) { // sort by creation time, oldest first return (int) (entry1.getValue() - entry2.getValue()); } }); StringBuilder html = new StringBuilder(); html.append("<h3>*** Prepared Transaction Details ***</h3>"); for (Map.Entry<Xid, Long> entry : 
xidsSortedByCreationTime) {
         Xid xid = entry.getKey();
         Transaction tx = resourceManager.getTransaction(xid);
         if (tx == null) {
            continue;
         }
         TransactionDetail detail = factory.createTransactionDetail(xid, tx, entry.getValue());
         JsonObject txJson = detail.toJSON();

         html.append("<table border=\"1\">");
         html.append("<tr><th>creation_time</th>");
         html.append("<td>" + txJson.get(TransactionDetail.KEY_CREATION_TIME) + "</td>");
         html.append("<th>xid_as_base_64</th>");
         html.append("<td colspan=\"3\">" + txJson.get(TransactionDetail.KEY_XID_AS_BASE64) + "</td></tr>");
         html.append("<tr><th>xid_format_id</th>");
         html.append("<td>" + txJson.get(TransactionDetail.KEY_XID_FORMAT_ID) + "</td>");
         html.append("<th>xid_global_txid</th>");
         html.append("<td>" + txJson.get(TransactionDetail.KEY_XID_GLOBAL_TXID) + "</td>");
         html.append("<th>xid_branch_qual</th>");
         html.append("<td>" + txJson.get(TransactionDetail.KEY_XID_BRANCH_QUAL) + "</td></tr>");
         html.append("<tr><th colspan=\"6\">Message List</th></tr>");
         html.append("<tr><td colspan=\"6\">");
         html.append("<table border=\"1\" cellspacing=\"0\" cellpadding=\"0\">");

         JsonArray msgs = txJson.getJsonArray(TransactionDetail.KEY_TX_RELATED_MESSAGES);
         for (int i = 0; i < msgs.size(); i++) {
            JsonObject msgJson = msgs.getJsonObject(i);
            JsonObject props = msgJson.getJsonObject(TransactionDetail.KEY_MSG_PROPERTIES);
            StringBuilder propstr = new StringBuilder();
            Set<String> keys = props.keySet();
            for (String key : keys) {
               propstr.append(key);
               propstr.append("=");
               propstr.append(props.get(key));
               propstr.append(", ");
            }
            // FIX: this row was closed with </tr> below but never opened
            html.append("<tr><th>operation_type</th>");
            // FIX: the cell is a <td>, so it must be closed with </td>, not </th>
            html.append("<td>" + msgJson.get(TransactionDetail.KEY_MSG_OP_TYPE) + "</td>");
            html.append("<th>message_type</th>");
            html.append("<td>" + msgJson.get(TransactionDetail.KEY_MSG_TYPE) + "</td></tr>");
            html.append("<tr><th>properties</th>");
            html.append("<td colspan=\"3\">" + propstr.toString() + "</td></tr>");
         }
         html.append("</table></td></tr>");
         html.append("</table><br>");
      }

      return
html.toString();
   } finally {
      blockOnIO();
   }
}

/** Lists the base64-encoded XIDs of transactions heuristically committed on this broker. */
@Override
public String[] listHeuristicCommittedTransactions() {
   checkStarted();
   clearIO();
   try {
      List<Xid> xids = resourceManager.getHeuristicCommittedTransactions();
      String[] s = new String[xids.size()];
      int i = 0;
      for (Xid xid : xids) {
         s[i++] = XidImpl.toBase64String(xid);
      }
      return s;
   } finally {
      blockOnIO();
   }
}

/** Lists the base64-encoded XIDs of transactions heuristically rolled back on this broker. */
@Override
public String[] listHeuristicRolledBackTransactions() {
   checkStarted();
   clearIO();
   try {
      List<Xid> xids = resourceManager.getHeuristicRolledbackTransactions();
      String[] s = new String[xids.size()];
      int i = 0;
      for (Xid xid : xids) {
         s[i++] = XidImpl.toBase64String(xid);
      }
      return s;
   } finally {
      blockOnIO();
   }
}

/**
 * Commits the prepared (XA) transaction matching the given base64-encoded XID and
 * records a heuristic completion so the decision survives a restart.
 *
 * @param transactionAsBase64 base64 form of the XID, as produced by listPreparedTransactions
 * @return true if a matching prepared transaction was found and committed
 */
@Override
public synchronized boolean commitPreparedTransaction(final String transactionAsBase64) throws Exception {
   checkStarted();
   clearIO();
   try {
      List<Xid> xids = resourceManager.getPreparedTransactions();
      for (Xid xid : xids) {
         if (XidImpl.toBase64String(xid).equals(transactionAsBase64)) {
            Transaction transaction = resourceManager.removeTransaction(xid);
            transaction.commit(false);
            long recordID = server.getStorageManager().storeHeuristicCompletion(xid, true);
            // make sure the heuristic-completion record is on disk before answering
            storageManager.waitOnOperations();
            resourceManager.putHeuristicCompletion(recordID, xid, true);
            return true;
         }
      }
      return false;
   } finally {
      blockOnIO();
   }
}

/**
 * Rolls back the prepared (XA) transaction matching the given base64-encoded XID and
 * records a heuristic completion so the decision survives a restart.
 *
 * @param transactionAsBase64 base64 form of the XID, as produced by listPreparedTransactions
 * @return true if a matching prepared transaction was found and rolled back
 */
@Override
public synchronized boolean rollbackPreparedTransaction(final String transactionAsBase64) throws Exception {
   checkStarted();
   clearIO();
   try {
      List<Xid> xids = resourceManager.getPreparedTransactions();
      for (Xid xid : xids) {
         if (XidImpl.toBase64String(xid).equals(transactionAsBase64)) {
            Transaction transaction = resourceManager.removeTransaction(xid);
            transaction.rollback();
            long recordID = server.getStorageManager().storeHeuristicCompletion(xid, false);
            server.getStorageManager().waitOnOperations();
            resourceManager.putHeuristicCompletion(recordID, xid, false);
            return true;
         }
      }
      return false;
   } finally {
      blockOnIO();
   }
}

/** Lists the remote addresses of all connections currently on the broker. */
@Override
public String[] listRemoteAddresses() {
checkStarted();
   clearIO();
   try {
      Set<RemotingConnection> connections = remotingService.getConnections();
      String[] remoteAddresses = new String[connections.size()];
      int i = 0;
      for (RemotingConnection connection : connections) {
         remoteAddresses[i++] = connection.getRemoteAddress();
      }
      return remoteAddresses;
   } finally {
      blockOnIO();
   }
}

/** Lists the remote addresses of connections whose address contains the given IP-address substring. */
@Override
public String[] listRemoteAddresses(final String ipAddress) {
   checkStarted();
   clearIO();
   try {
      Set<RemotingConnection> connections = remotingService.getConnections();
      List<String> remoteConnections = new ArrayList<>();
      for (RemotingConnection connection : connections) {
         String remoteAddress = connection.getRemoteAddress();
         // substring match, not an exact address comparison
         if (remoteAddress.contains(ipAddress)) {
            remoteConnections.add(connection.getRemoteAddress());
         }
      }
      return remoteConnections.toArray(new String[remoteConnections.size()]);
   } finally {
      blockOnIO();
   }
}

/**
 * Fails and removes every connection whose remote address contains the given IP-address substring.
 *
 * @return true if at least one connection was closed
 */
@Override
public boolean closeConnectionsForAddress(final String ipAddress) {
   checkStarted();
   clearIO();
   try {
      boolean closed = false;
      Set<RemotingConnection> connections = remotingService.getConnections();
      for (RemotingConnection connection : connections) {
         String remoteAddress = connection.getRemoteAddress();
         if (remoteAddress.contains(ipAddress)) {
            connection.fail(ActiveMQMessageBundle.BUNDLE.connectionsClosedByManagement(ipAddress));
            remotingService.removeConnection(connection.getID());
            closed = true;
         }
      }
      return closed;
   } finally {
      blockOnIO();
   }
}

/**
 * Closes the connection of every consumer attached to a local queue bound to the given address.
 * Failures are logged rather than propagated.
 *
 * @return true if at least one connection was closed
 */
@Override
public boolean closeConsumerConnectionsForAddress(final String address) {
   boolean closed = false;
   checkStarted();
   clearIO();
   try {
      for (Binding binding : postOffice.getMatchingBindings(SimpleString.toSimpleString(address)).getBindings()) {
         if (binding instanceof LocalQueueBinding) {
            Queue queue = ((LocalQueueBinding) binding).getQueue();
            for (Consumer consumer : queue.getConsumers()) {
               if (consumer instanceof ServerConsumer) {
                  ServerConsumer serverConsumer = (ServerConsumer) consumer;
                  RemotingConnection connection = null;

                  // resolve the consumer's connection by matching its connection ID
                  for (RemotingConnection
potentialConnection : remotingService.getConnections()) {
                     if (potentialConnection.getID().toString().equals(serverConsumer.getConnectionID())) {
                        connection = potentialConnection;
                     }
                  }

                  if (connection != null) {
                     // NOTE(review): here the connection is removed before fail(), whereas
                     // closeConnectionsForAddress fails first and removes second — confirm
                     // whether the ordering is significant before unifying
                     remotingService.removeConnection(connection.getID());
                     connection.fail(ActiveMQMessageBundle.BUNDLE.consumerConnectionsClosedByManagement(address));
                     closed = true;
                  }
               }
            }
         }
      }
   } catch (Exception e) {
      // best-effort operation: log and fall through, returning whatever was closed so far
      ActiveMQServerLogger.LOGGER.failedToCloseConsumerConnectionsForAddress(address, e);
   } finally {
      blockOnIO();
   }
   return closed;
}

/**
 * Fails and removes the connection of every session belonging to the given user.
 *
 * @return true if at least one connection was closed
 */
@Override
public boolean closeConnectionsForUser(final String userName) {
   boolean closed = false;
   checkStarted();
   clearIO();
   try {
      for (ServerSession serverSession : server.getSessions()) {
         if (serverSession.getUsername() != null && serverSession.getUsername().equals(userName)) {
            RemotingConnection connection = null;

            for (RemotingConnection potentialConnection : remotingService.getConnections()) {
               if (potentialConnection.getID().toString().equals(serverSession.getConnectionID().toString())) {
                  connection = potentialConnection;
               }
            }

            if (connection != null) {
               remotingService.removeConnection(connection.getID());
               connection.fail(ActiveMQMessageBundle.BUNDLE.connectionsForUserClosedByManagement(userName));
               closed = true;
            }
         }
      }
   } finally {
      blockOnIO();
   }
   return closed;
}

/**
 * Fails and removes the connection with the given ID.
 *
 * @return true if a matching connection was found
 */
@Override
public boolean closeConnectionWithID(final String ID) {
   checkStarted();
   clearIO();
   try {
      for (RemotingConnection connection : remotingService.getConnections()) {
         if (connection.getID().toString().equals(ID)) {
            remotingService.removeConnection(connection.getID());
            connection.fail(ActiveMQMessageBundle.BUNDLE.connectionWithIDClosedByManagement(ID));
            return true;
         }
      }
   } finally {
      blockOnIO();
   }
   return false;
}

/**
 * Closes the session with the given name on the given connection.
 *
 * @return true if a matching session was found and closed
 */
@Override
public boolean closeSessionWithID(final String connectionID, final String ID) throws Exception {
   checkStarted();
   clearIO();
   try {
      List<ServerSession> sessions = server.getSessions(connectionID);
      for (ServerSession session : sessions) {
         if
(session.getName().equals(ID.toString())) {
            // NOTE(review): ID is already a String, so ID.toString() is redundant
            session.close(true);
            return true;
         }
      }
   } finally {
      blockOnIO();
   }
   return false;
}

/**
 * Disconnects the consumer with the given sequential ID on the session with the given name.
 *
 * @return true if a matching consumer was found and disconnected
 */
@Override
public boolean closeConsumerWithID(final String sessionID, final String ID) throws Exception {
   checkStarted();
   clearIO();
   try {
      Set<ServerSession> sessions = server.getSessions();
      for (ServerSession session : sessions) {
         if (session.getName().equals(sessionID.toString())) {
            Set<ServerConsumer> serverConsumers = session.getServerConsumers();
            for (ServerConsumer serverConsumer : serverConsumers) {
               if (serverConsumer.sequentialID() == Long.valueOf(ID)) {
                  serverConsumer.disconnect();
                  return true;
               }
            }
         }
      }
   } finally {
      blockOnIO();
   }
   return false;
}

/** Lists the IDs of all connections currently on the broker. */
@Override
public String[] listConnectionIDs() {
   checkStarted();
   clearIO();
   try {
      Set<RemotingConnection> connections = remotingService.getConnections();
      String[] connectionIDs = new String[connections.size()];
      int i = 0;
      for (RemotingConnection connection : connections) {
         connectionIDs[i++] = connection.getID().toString();
      }
      return connectionIDs;
   } finally {
      blockOnIO();
   }
}

/** Lists the names of all sessions on the given connection. */
@Override
public String[] listSessions(final String connectionID) {
   checkStarted();
   clearIO();
   try {
      List<ServerSession> sessions = server.getSessions(connectionID);
      String[] sessionIDs = new String[sessions.size()];
      int i = 0;
      for (ServerSession serverSession : sessions) {
         sessionIDs[i++] = serverSession.getName();
      }
      return sessionIDs;
   } finally {
      blockOnIO();
   }
}

/* (non-Javadoc)
    * @see org.apache.activemq.artemis.api.core.management.ActiveMQServerControl#listProducersInfoAsJSON()
    */
@Override
public String listProducersInfoAsJSON() throws Exception {
   JsonArrayBuilder producers = JsonLoader.createArrayBuilder();

   for (ServerSession session : server.getSessions()) {
      session.describeProducersInfo(producers);
   }

   return producers.build().toString();
}

/** Returns one JSON page of connections, filtered and sorted according to the given options. */
@Override
public String listConnections(String options, int page, int pageSize) throws Exception {
   checkStarted();
   clearIO();
   try {
      server.getPostOffice().getAddresses();
ConnectionView view = new ConnectionView(server); view.setCollection(server.getRemotingService().getConnections()); view.setOptions(options); return view.getResultsAsJson(page, pageSize); } finally { blockOnIO(); } } @Override public String listSessions(String options, int page, int pageSize) throws Exception { checkStarted(); clearIO(); try { SessionView view = new SessionView(); view.setCollection(server.getSessions()); view.setOptions(options); return view.getResultsAsJson(page, pageSize); } finally { blockOnIO(); } } @Override public String listConsumers(String options, int page, int pageSize) throws Exception { checkStarted(); clearIO(); try { Set<ServerConsumer> consumers = new HashSet(); for (ServerSession session : server.getSessions()) { consumers.addAll(session.getServerConsumers()); } ConsumerView view = new ConsumerView(server); view.setCollection(consumers); view.setOptions(options); return view.getResultsAsJson(page, pageSize); } finally { blockOnIO(); } } @Override public String listAddresses(String options, int page, int pageSize) throws Exception { checkStarted(); clearIO(); try { final Set<SimpleString> addresses = server.getPostOffice().getAddresses(); List<AddressInfo> addressInfo = new ArrayList<>(); for (SimpleString address : addresses) { AddressInfo info = server.getPostOffice().getAddressInfo(address); //ignore if no longer available if (info != null) { addressInfo.add(info); } } AddressView view = new AddressView(server); view.setCollection(addressInfo); view.setOptions(options); return view.getResultsAsJson(page, pageSize); } finally { blockOnIO(); } } @Override public String listQueues(String options, int page, int pageSize) throws Exception { checkStarted(); clearIO(); try { List<QueueControl> queues = new ArrayList<>(); Object[] qs = server.getManagementService().getResources(QueueControl.class); for (int i = 0; i < qs.length; i++) { queues.add((QueueControl) qs[i]); } QueueView view = new QueueView(server); view.setCollection(queues); 
view.setOptions(options);
      return view.getResultsAsJson(page, pageSize);
   } finally {
      blockOnIO();
   }
}

/** Returns one JSON page of producers across all sessions, filtered and sorted according to the given options. */
@Override
public String listProducers(@Parameter(name = "Options") String options,
                            @Parameter(name = "Page Number") int page,
                            @Parameter(name = "Page Size") int pageSize) throws Exception {
   checkStarted();
   clearIO();
   try {
      Set<ServerProducer> producers = new HashSet<>();
      for (ServerSession session : server.getSessions()) {
         producers.addAll(session.getServerProducers().values());
      }
      ProducerView view = new ProducerView(server);
      view.setCollection(producers);
      view.setOptions(options);
      return view.getResultsAsJson(page, pageSize);
   } finally {
      blockOnIO();
   }
}

/** Returns all connections as a JSON array (ID, client address, creation time, implementation, session count). */
@Override
public String listConnectionsAsJSON() throws Exception {
   checkStarted();
   clearIO();
   try {
      JsonArrayBuilder array = JsonLoader.createArrayBuilder();
      Set<RemotingConnection> connections = server.getRemotingService().getConnections();
      for (RemotingConnection connection : connections) {
         JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("connectionID", connection.getID().toString()).add("clientAddress", connection.getRemoteAddress()).add("creationTime", connection.getCreationTime()).add("implementation", connection.getClass().getSimpleName()).add("sessionCount", server.getSessions(connection.getID().toString()).size());
         array.add(obj);
      }
      return array.build().toString();
   } finally {
      blockOnIO();
   }
}

/** Returns the sessions of the given connection as a JSON array; see buildSessionJSON for the per-session shape. */
@Override
public String listSessionsAsJSON(final String connectionID) throws Exception {
   checkStarted();
   clearIO();
   JsonArrayBuilder array = JsonLoader.createArrayBuilder();
   try {
      List<ServerSession> sessions = server.getSessions(connectionID);
      for (ServerSession sess : sessions) {
         buildSessionJSON(array, sess);
      }
   } finally {
      blockOnIO();
   }
   return array.build().toString();
}

/** Returns every session on the broker as a JSON array; see buildSessionJSON for the per-session shape. */
@Override
public String listAllSessionsAsJSON() throws Exception {
   checkStarted();
   clearIO();
   JsonArrayBuilder array = JsonLoader.createArrayBuilder();
   try {
      Set<ServerSession> sessions = server.getSessions();
      for (ServerSession sess
: sessions) {
         buildSessionJSON(array, sess);
      }
   } finally {
      blockOnIO();
   }
   return array.build().toString();
}

/**
 * Appends one JSON object describing the session (sessionID, creationTime, consumerCount,
 * plus optional principal and metadata) to the given array builder.
 */
public void buildSessionJSON(JsonArrayBuilder array, ServerSession sess) {
   JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("sessionID", sess.getName()).add("creationTime", sess.getCreationTime()).add("consumerCount", sess.getServerConsumers().size());

   if (sess.getValidatedUser() != null) {
      obj.add("principal", sess.getValidatedUser());
   }

   if (sess.getMetaData() != null) {
      final JsonObjectBuilder metadata = JsonLoader.createObjectBuilder();
      for (Entry<String, String> entry : sess.getMetaData().entrySet()) {
         metadata.add(entry.getKey(), entry.getValue());
      }
      obj.add("metadata", metadata);
   }

   array.add(obj);
}

/** Returns the consumers of the given connection as a JSON array; see toJSONObject for the per-consumer shape. */
@Override
public String listConsumersAsJSON(String connectionID) throws Exception {
   checkStarted();
   clearIO();
   try {
      JsonArrayBuilder array = JsonLoader.createArrayBuilder();
      Set<RemotingConnection> connections = server.getRemotingService().getConnections();
      for (RemotingConnection connection : connections) {
         if (connectionID.equals(connection.getID().toString())) {
            List<ServerSession> sessions = server.getSessions(connectionID);
            for (ServerSession session : sessions) {
               Set<ServerConsumer> consumers = session.getServerConsumers();
               for (ServerConsumer consumer : consumers) {
                  JsonObject obj = toJSONObject(consumer);
                  if (obj != null) {
                     array.add(obj);
                  }
               }
            }
         }
      }
      return array.build().toString();
   } finally {
      blockOnIO();
   }
}

/** Returns every consumer on the broker as a JSON array; see toJSONObject for the per-consumer shape. */
@Override
public String listAllConsumersAsJSON() throws Exception {
   checkStarted();
   clearIO();
   try {
      JsonArrayBuilder array = JsonLoader.createArrayBuilder();
      Set<ServerSession> sessions = server.getSessions();
      for (ServerSession session : sessions) {
         Set<ServerConsumer> consumers = session.getServerConsumers();
         for (ServerConsumer consumer : consumers) {
            JsonObject obj = toJSONObject(consumer);
            if (obj != null) {
               array.add(obj);
            }
         }
      }
      return array.build().toString();
   } finally {
      blockOnIO();
   }
}

/* Builds the JSON description of a single consumer (IDs, queue, browse flag, timings, filter). */
private JsonObject
toJSONObject(ServerConsumer consumer) throws Exception {
   JsonObjectBuilder obj = JsonLoader.createObjectBuilder().add("consumerID", consumer.getID()).add("connectionID", consumer.getConnectionID().toString()).add("sessionID", consumer.getSessionID()).add("queueName", consumer.getQueue().getName().toString()).add("browseOnly", consumer.isBrowseOnly()).add("creationTime", consumer.getCreationTime()).add("deliveringCount", consumer.getDeliveringMessages().size());
   if (consumer.getFilter() != null) {
      obj.add("filter", consumer.getFilter().getFilterString().toString());
   }

   return obj.build();
}

/** Returns the configured connectors as an array of [name, factory class name, params] triplets. */
@Override
public Object[] getConnectors() throws Exception {
   checkStarted();
   clearIO();
   try {
      Collection<TransportConfiguration> connectorConfigurations = configuration.getConnectorConfigurations().values();

      Object[] ret = new Object[connectorConfigurations.size()];

      int i = 0;
      for (TransportConfiguration config : connectorConfigurations) {
         Object[] tc = new Object[3];

         tc[0] = config.getName();
         tc[1] = config.getFactoryClassName();
         tc[2] = config.getParams();

         ret[i++] = tc;
      }

      return ret;
   } finally {
      blockOnIO();
   }
}

/** Returns the configured connectors as a JSON array. */
@Override
public String getConnectorsAsJSON() throws Exception {
   checkStarted();
   clearIO();
   try {
      JsonArrayBuilder array = JsonLoader.createArrayBuilder();

      for (TransportConfiguration config : configuration.getConnectorConfigurations().values()) {
         array.add(config.toJson());
      }

      return array.build().toString();
   } finally {
      blockOnIO();
   }
}

/** Adds security settings for the address match; delegates with an empty browse-roles list. */
@Override
public void addSecuritySettings(final String addressMatch,
                                final String sendRoles,
                                final String consumeRoles,
                                final String createDurableQueueRoles,
                                final String deleteDurableQueueRoles,
                                final String createNonDurableQueueRoles,
                                final String deleteNonDurableQueueRoles,
                                final String manageRoles) throws Exception {
   addSecuritySettings(addressMatch, sendRoles, consumeRoles, createDurableQueueRoles, deleteDurableQueueRoles, createNonDurableQueueRoles, deleteNonDurableQueueRoles, manageRoles, "");
}

/* Delegates with empty create/delete-address role lists. */
@Override
public void
addSecuritySettings(final String addressMatch,
                    final String sendRoles,
                    final String consumeRoles,
                    final String createDurableQueueRoles,
                    final String deleteDurableQueueRoles,
                    final String createNonDurableQueueRoles,
                    final String deleteNonDurableQueueRoles,
                    final String manageRoles,
                    final String browseRoles) throws Exception {
   addSecuritySettings(addressMatch, sendRoles, consumeRoles, createDurableQueueRoles, deleteDurableQueueRoles, createNonDurableQueueRoles, deleteNonDurableQueueRoles, manageRoles, browseRoles, "", "");
}

/**
 * Adds (or replaces) the security settings for the given address match, both in the
 * in-memory repository and in persistent storage so they survive a broker restart.
 */
@Override
public void addSecuritySettings(final String addressMatch,
                                final String sendRoles,
                                final String consumeRoles,
                                final String createDurableQueueRoles,
                                final String deleteDurableQueueRoles,
                                final String createNonDurableQueueRoles,
                                final String deleteNonDurableQueueRoles,
                                final String manageRoles,
                                final String browseRoles,
                                final String createAddressRoles,
                                final String deleteAddressRoles) throws Exception {
   checkStarted();
   clearIO();
   try {
      Set<Role> roles = SecurityFormatter.createSecurity(sendRoles, consumeRoles, createDurableQueueRoles, deleteDurableQueueRoles, createNonDurableQueueRoles, deleteNonDurableQueueRoles, manageRoles, browseRoles, createAddressRoles, deleteAddressRoles);

      server.getSecurityRepository().addMatch(addressMatch, roles);

      PersistedRoles persistedRoles = new PersistedRoles(addressMatch, sendRoles, consumeRoles, createDurableQueueRoles, deleteDurableQueueRoles, createNonDurableQueueRoles, deleteNonDurableQueueRoles, manageRoles, browseRoles, createAddressRoles, deleteAddressRoles);

      storageManager.storeSecurityRoles(persistedRoles);
   } finally {
      blockOnIO();
   }
}

/** Removes the security settings for the given address match, both in memory and from storage. */
@Override
public void removeSecuritySettings(final String addressMatch) throws Exception {
   checkStarted();
   clearIO();
   try {
      server.getSecurityRepository().removeMatch(addressMatch);
      storageManager.deleteSecurityRoles(new SimpleString(addressMatch));
   } finally {
      blockOnIO();
   }
}

/** Returns the roles matching the address as arrays of [name, send, consume, create/delete durable, create/delete non-durable, manage] flags. */
@Override
public Object[] getRoles(final String addressMatch) throws Exception {
checkStarted(); checkStarted(); clearIO(); try { Set<Role> roles = server.getSecurityRepository().getMatch(addressMatch); Object[] objRoles = new Object[roles.size()]; int i = 0; for (Role role : roles) { objRoles[i++] = new Object[]{role.getName(), CheckType.SEND.hasRole(role), CheckType.CONSUME.hasRole(role), CheckType.CREATE_DURABLE_QUEUE.hasRole(role), CheckType.DELETE_DURABLE_QUEUE.hasRole(role), CheckType.CREATE_NON_DURABLE_QUEUE.hasRole(role), CheckType.DELETE_NON_DURABLE_QUEUE.hasRole(role), CheckType.MANAGE.hasRole(role)}; } return objRoles; } finally { blockOnIO(); } } @Override public String getRolesAsJSON(final String addressMatch) throws Exception { checkStarted(); clearIO(); try { JsonArrayBuilder json = JsonLoader.createArrayBuilder(); Set<Role> roles = server.getSecurityRepository().getMatch(addressMatch); for (Role role : roles) { json.add(role.toJson()); } return json.build().toString(); } finally { blockOnIO(); } } @Override public String getAddressSettingsAsJSON(final String address) throws Exception { checkStarted(); AddressSettings addressSettings = server.getAddressSettingsRepository().getMatch(address); String policy = addressSettings.getAddressFullMessagePolicy() == AddressFullMessagePolicy.PAGE ? "PAGE" : addressSettings.getAddressFullMessagePolicy() == AddressFullMessagePolicy.BLOCK ? "BLOCK" : addressSettings.getAddressFullMessagePolicy() == AddressFullMessagePolicy.DROP ? "DROP" : "FAIL"; String consumerPolicy = addressSettings.getSlowConsumerPolicy() == SlowConsumerPolicy.NOTIFY ? 
"NOTIFY" : "KILL"; JsonObjectBuilder settings = JsonLoader.createObjectBuilder(); if (addressSettings.getDeadLetterAddress() != null) { settings.add("DLA", addressSettings.getDeadLetterAddress().toString()); } if (addressSettings.getExpiryAddress() != null) { settings.add("expiryAddress", addressSettings.getExpiryAddress().toString()); } return settings.add("expiryDelay", addressSettings.getExpiryDelay()) .add("maxDeliveryAttempts", addressSettings.getMaxDeliveryAttempts()) .add("pageCacheMaxSize", addressSettings.getPageCacheMaxSize()) .add("maxSizeBytes", addressSettings.getMaxSizeBytes()) .add("pageSizeBytes", addressSettings.getPageSizeBytes()) .add("redeliveryDelay", addressSettings.getRedeliveryDelay()) .add("redeliveryMultiplier", addressSettings.getRedeliveryMultiplier()) .add("maxRedeliveryDelay", addressSettings.getMaxRedeliveryDelay()) .add("redistributionDelay", addressSettings.getRedistributionDelay()) .add("lastValueQueue", addressSettings.isDefaultLastValueQueue()) .add("sendToDLAOnNoRoute", addressSettings.isSendToDLAOnNoRoute()) .add("addressFullMessagePolicy", policy) .add("slowConsumerThreshold", addressSettings.getSlowConsumerThreshold()) .add("slowConsumerCheckPeriod", addressSettings.getSlowConsumerCheckPeriod()) .add("slowConsumerPolicy", consumerPolicy) .add("autoCreateJmsQueues", addressSettings.isAutoCreateJmsQueues()) .add("autoCreateJmsTopics", addressSettings.isAutoCreateJmsTopics()) .add("autoDeleteJmsQueues", addressSettings.isAutoDeleteJmsQueues()) .add("autoDeleteJmsTopics", addressSettings.isAutoDeleteJmsQueues()) .add("autoCreateQueues", addressSettings.isAutoCreateQueues()) .add("autoDeleteQueues", addressSettings.isAutoDeleteQueues()) .add("autoCreateAddress", addressSettings.isAutoCreateAddresses()) .add("autoDeleteAddress", addressSettings.isAutoDeleteAddresses()) .build() .toString(); } @Override public void addAddressSettings(final String address, final String DLA, final String expiryAddress, final long expiryDelay, final 
boolean lastValueQueue,
                               final int deliveryAttempts,
                               final long maxSizeBytes,
                               final int pageSizeBytes,
                               final int pageMaxCacheSize,
                               final long redeliveryDelay,
                               final double redeliveryMultiplier,
                               final long maxRedeliveryDelay,
                               final long redistributionDelay,
                               final boolean sendToDLAOnNoRoute,
                               final String addressFullMessagePolicy,
                               final long slowConsumerThreshold,
                               final long slowConsumerCheckPeriod,
                               final String slowConsumerPolicy,
                               final boolean autoCreateJmsQueues,
                               final boolean autoDeleteJmsQueues,
                               final boolean autoCreateJmsTopics,
                               final boolean autoDeleteJmsTopics) throws Exception {
   addAddressSettings(address, DLA, expiryAddress, expiryDelay, lastValueQueue, deliveryAttempts, maxSizeBytes, pageSizeBytes, pageMaxCacheSize, redeliveryDelay, redeliveryMultiplier, maxRedeliveryDelay, redistributionDelay, sendToDLAOnNoRoute, addressFullMessagePolicy, slowConsumerThreshold, slowConsumerCheckPeriod, slowConsumerPolicy, autoCreateJmsQueues, autoDeleteJmsQueues, autoCreateJmsTopics, autoDeleteJmsTopics, AddressSettings.DEFAULT_AUTO_CREATE_QUEUES, AddressSettings.DEFAULT_AUTO_DELETE_QUEUES, AddressSettings.DEFAULT_AUTO_CREATE_ADDRESSES, AddressSettings.DEFAULT_AUTO_DELETE_ADDRESSES);
}

/**
 * Adds (or replaces) the address settings for the given address match.
 * Validates the page/max size arguments, then stores the settings both in
 * memory and in persistent storage.
 */
@Override
public void addAddressSettings(final String address,
                               final String DLA,
                               final String expiryAddress,
                               final long expiryDelay,
                               final boolean lastValueQueue,
                               final int deliveryAttempts,
                               final long maxSizeBytes,
                               final int pageSizeBytes,
                               final int pageMaxCacheSize,
                               final long redeliveryDelay,
                               final double redeliveryMultiplier,
                               final long maxRedeliveryDelay,
                               final long redistributionDelay,
                               final boolean sendToDLAOnNoRoute,
                               final String addressFullMessagePolicy,
                               final long slowConsumerThreshold,
                               final long slowConsumerCheckPeriod,
                               final String slowConsumerPolicy,
                               final boolean autoCreateJmsQueues,
                               final boolean autoDeleteJmsQueues,
                               final boolean autoCreateJmsTopics,
                               final boolean autoDeleteJmsTopics,
                               final boolean autoCreateQueues,
                               final boolean autoDeleteQueues,
                               final boolean
autoCreateAddresses,
                               final boolean autoDeleteAddresses) throws Exception {
   checkStarted();

   // JBPAPP-6334 requested this to be pageSizeBytes > maxSizeBytes
   if (pageSizeBytes > maxSizeBytes && maxSizeBytes > 0) {
      // NOTE(review): the message renders "(pageSizeBytes < maxSizeBytes)" although the failing
      // condition is pageSizeBytes > maxSizeBytes — confirm and consider correcting the direction
      throw new IllegalStateException("pageSize has to be lower than maxSizeBytes. Invalid argument (" + pageSizeBytes + " < " + maxSizeBytes + ")");
   }

   if (maxSizeBytes < -1) {
      throw new IllegalStateException("Invalid argument on maxSizeBytes");
   }

   AddressSettings addressSettings = new AddressSettings();
   addressSettings.setDeadLetterAddress(DLA == null ? null : new SimpleString(DLA));
   addressSettings.setExpiryAddress(expiryAddress == null ? null : new SimpleString(expiryAddress));
   addressSettings.setExpiryDelay(expiryDelay);
   addressSettings.setDefaultLastValueQueue(lastValueQueue);
   addressSettings.setMaxDeliveryAttempts(deliveryAttempts);
   addressSettings.setPageCacheMaxSize(pageMaxCacheSize);
   addressSettings.setMaxSizeBytes(maxSizeBytes);
   addressSettings.setPageSizeBytes(pageSizeBytes);
   addressSettings.setRedeliveryDelay(redeliveryDelay);
   addressSettings.setRedeliveryMultiplier(redeliveryMultiplier);
   addressSettings.setMaxRedeliveryDelay(maxRedeliveryDelay);
   addressSettings.setRedistributionDelay(redistributionDelay);
   addressSettings.setSendToDLAOnNoRoute(sendToDLAOnNoRoute);
   // policy name is matched case-insensitively; null defaults to PAGE,
   // an unrecognized name leaves the AddressSettings default untouched
   if (addressFullMessagePolicy == null) {
      addressSettings.setAddressFullMessagePolicy(AddressFullMessagePolicy.PAGE);
   } else if (addressFullMessagePolicy.equalsIgnoreCase("PAGE")) {
      addressSettings.setAddressFullMessagePolicy(AddressFullMessagePolicy.PAGE);
   } else if (addressFullMessagePolicy.equalsIgnoreCase("DROP")) {
      addressSettings.setAddressFullMessagePolicy(AddressFullMessagePolicy.DROP);
   } else if (addressFullMessagePolicy.equalsIgnoreCase("BLOCK")) {
      addressSettings.setAddressFullMessagePolicy(AddressFullMessagePolicy.BLOCK);
   } else if (addressFullMessagePolicy.equalsIgnoreCase("FAIL")) {
      addressSettings.setAddressFullMessagePolicy(AddressFullMessagePolicy.FAIL);
   }
addressSettings.setSlowConsumerThreshold(slowConsumerThreshold);
   addressSettings.setSlowConsumerCheckPeriod(slowConsumerCheckPeriod);
   // policy name is matched case-insensitively; null defaults to NOTIFY,
   // an unrecognized name leaves the AddressSettings default untouched
   if (slowConsumerPolicy == null) {
      addressSettings.setSlowConsumerPolicy(SlowConsumerPolicy.NOTIFY);
   } else if (slowConsumerPolicy.equalsIgnoreCase("NOTIFY")) {
      addressSettings.setSlowConsumerPolicy(SlowConsumerPolicy.NOTIFY);
   } else if (slowConsumerPolicy.equalsIgnoreCase("KILL")) {
      addressSettings.setSlowConsumerPolicy(SlowConsumerPolicy.KILL);
   }
   addressSettings.setAutoCreateJmsQueues(autoCreateJmsQueues);
   addressSettings.setAutoDeleteJmsQueues(autoDeleteJmsQueues);
   addressSettings.setAutoCreateJmsTopics(autoCreateJmsTopics);
   addressSettings.setAutoDeleteJmsTopics(autoDeleteJmsTopics);
   addressSettings.setAutoCreateQueues(autoCreateQueues);
   addressSettings.setAutoDeleteQueues(autoDeleteQueues);
   addressSettings.setAutoCreateAddresses(autoCreateAddresses);
   addressSettings.setAutoDeleteAddresses(autoDeleteAddresses);
   server.getAddressSettingsRepository().addMatch(address, addressSettings);

   // persist so the settings survive a broker restart
   storageManager.storeAddressSetting(new PersistedAddressSetting(new SimpleString(address), addressSettings));
}

/** Removes the address settings for the given match, both in memory and from storage. */
@Override
public void removeAddressSettings(final String addressMatch) throws Exception {
   checkStarted();

   server.getAddressSettingsRepository().removeMatch(addressMatch);
   storageManager.deleteAddressSetting(new SimpleString(addressMatch));
}

/** Sends queue-info (reset) messages for the given queue to the given address. */
public void sendQueueInfoToQueue(final String queueName, final String address) throws Exception {
   checkStarted();
   clearIO();
   try {
      postOffice.sendQueueInfoToQueue(new SimpleString(queueName), new SimpleString(address == null ?
"" : address)); GroupingHandler handler = server.getGroupingHandler(); if (handler != null) { // the group handler would miss responses if the group was requested before the reset was done // on that case we ask the groupinghandler to replay its send in case it's waiting for the information handler.resendPending(); } } finally { blockOnIO(); } } @Override public String[] getDivertNames() { checkStarted(); clearIO(); try { Object[] diverts = server.getManagementService().getResources(DivertControl.class); String[] names = new String[diverts.length]; for (int i = 0; i < diverts.length; i++) { DivertControl divert = (DivertControl) diverts[i]; names[i] = divert.getUniqueName(); } return names; } finally { blockOnIO(); } } @Override public void createDivert(final String name, final String routingName, final String address, final String forwardingAddress, final boolean exclusive, final String filterString, final String transformerClassName) throws Exception { createDivert(name, routingName, address, forwardingAddress, exclusive, filterString, transformerClassName, ActiveMQDefaultConfiguration.getDefaultDivertRoutingType()); } @Override public void createDivert(final String name, final String routingName, final String address, final String forwardingAddress, final boolean exclusive, final String filterString, final String transformerClassName, final String routingType) throws Exception { createDivert(name, routingName, address, forwardingAddress, exclusive, filterString, transformerClassName, (String) null, routingType); } @Override public void createDivert(final String name, final String routingName, final String address, final String forwardingAddress, final boolean exclusive, final String filterString, final String transformerClassName, final String transformerPropertiesAsJSON, final String routingType) throws Exception { createDivert(name, routingName, address, forwardingAddress, exclusive, filterString, transformerClassName, 
JsonUtil.readJsonProperties(transformerPropertiesAsJSON), routingType); } @Override public void createDivert(final String name, final String routingName, final String address, final String forwardingAddress, final boolean exclusive, final String filterString, final String transformerClassName, final Map<String, String> transformerProperties, final String routingType) throws Exception { checkStarted(); clearIO(); try { TransformerConfiguration transformerConfiguration = transformerClassName == null ? null : new TransformerConfiguration(transformerClassName).setProperties(transformerProperties); DivertConfiguration config = new DivertConfiguration().setName(name).setRoutingName(routingName).setAddress(address).setForwardingAddress(forwardingAddress).setExclusive(exclusive).setFilterString(filterString).setTransformerConfiguration(transformerConfiguration).setRoutingType(DivertConfigurationRoutingType.valueOf(routingType)); server.deployDivert(config); } finally { blockOnIO(); } } @Override public void destroyDivert(final String name) throws Exception { checkStarted(); clearIO(); try { server.destroyDivert(SimpleString.toSimpleString(name)); } finally { blockOnIO(); } } @Override public String[] getBridgeNames() { checkStarted(); clearIO(); try { Object[] bridges = server.getManagementService().getResources(BridgeControl.class); String[] names = new String[bridges.length]; for (int i = 0; i < bridges.length; i++) { BridgeControl bridge = (BridgeControl) bridges[i]; names[i] = bridge.getName(); } return names; } finally { blockOnIO(); } } @Override public void createBridge(final String name, final String queueName, final String forwardingAddress, final String filterString, final String transformerClassName, final long retryInterval, final double retryIntervalMultiplier, final int initialConnectAttempts, final int reconnectAttempts, final boolean useDuplicateDetection, final int confirmationWindowSize, final int producerWindowSize, final long clientFailureCheckPeriod, 
final String staticConnectorsOrDiscoveryGroup, boolean useDiscoveryGroup, final boolean ha, final String user, final String password) throws Exception { createBridge(name, queueName, forwardingAddress, filterString, transformerClassName, (String) null, retryInterval, retryIntervalMultiplier, initialConnectAttempts, reconnectAttempts, useDuplicateDetection, confirmationWindowSize, producerWindowSize, clientFailureCheckPeriod, staticConnectorsOrDiscoveryGroup, useDiscoveryGroup, ha, user, password); } @Override public void createBridge(final String name, final String queueName, final String forwardingAddress, final String filterString, final String transformerClassName, final String transformerPropertiesAsJSON, final long retryInterval, final double retryIntervalMultiplier, final int initialConnectAttempts, final int reconnectAttempts, final boolean useDuplicateDetection, final int confirmationWindowSize, final int producerWindowSize, final long clientFailureCheckPeriod, final String staticConnectorsOrDiscoveryGroup, boolean useDiscoveryGroup, final boolean ha, final String user, final String password) throws Exception { createBridge(name, queueName, forwardingAddress, filterString, transformerClassName, JsonUtil.readJsonProperties(transformerPropertiesAsJSON), retryInterval, retryIntervalMultiplier, initialConnectAttempts, reconnectAttempts, useDuplicateDetection, confirmationWindowSize, producerWindowSize, clientFailureCheckPeriod, staticConnectorsOrDiscoveryGroup, useDiscoveryGroup, ha, user, password); } @Override public void createBridge(final String name, final String queueName, final String forwardingAddress, final String filterString, final String transformerClassName, final Map<String, String> transformerProperties, final long retryInterval, final double retryIntervalMultiplier, final int initialConnectAttempts, final int reconnectAttempts, final boolean useDuplicateDetection, final int confirmationWindowSize, final int producerWindowSize, final long 
clientFailureCheckPeriod, final String staticConnectorsOrDiscoveryGroup, boolean useDiscoveryGroup, final boolean ha, final String user, final String password) throws Exception { checkStarted(); clearIO(); try { TransformerConfiguration transformerConfiguration = transformerClassName == null ? null : new TransformerConfiguration(transformerClassName).setProperties(transformerProperties); BridgeConfiguration config = new BridgeConfiguration().setName(name).setQueueName(queueName).setForwardingAddress(forwardingAddress).setFilterString(filterString).setTransformerConfiguration(transformerConfiguration).setClientFailureCheckPeriod(clientFailureCheckPeriod).setRetryInterval(retryInterval).setRetryIntervalMultiplier(retryIntervalMultiplier).setInitialConnectAttempts(initialConnectAttempts).setReconnectAttempts(reconnectAttempts).setUseDuplicateDetection(useDuplicateDetection).setConfirmationWindowSize(confirmationWindowSize).setProducerWindowSize(producerWindowSize).setHA(ha).setUser(user).setPassword(password); if (useDiscoveryGroup) { config.setDiscoveryGroupName(staticConnectorsOrDiscoveryGroup); } else { config.setStaticConnectors(ListUtil.toList(staticConnectorsOrDiscoveryGroup)); } server.deployBridge(config); } finally { blockOnIO(); } } @Override public void createBridge(final String name, final String queueName, final String forwardingAddress, final String filterString, final String transformerClassName, final long retryInterval, final double retryIntervalMultiplier, final int initialConnectAttempts, final int reconnectAttempts, final boolean useDuplicateDetection, final int confirmationWindowSize, final long clientFailureCheckPeriod, final String staticConnectorsOrDiscoveryGroup, boolean useDiscoveryGroup, final boolean ha, final String user, final String password) throws Exception { checkStarted(); clearIO(); try { TransformerConfiguration transformerConfiguration = transformerClassName == null ? 
null : new TransformerConfiguration(transformerClassName); BridgeConfiguration config = new BridgeConfiguration().setName(name).setQueueName(queueName).setForwardingAddress(forwardingAddress).setFilterString(filterString).setTransformerConfiguration(transformerConfiguration).setClientFailureCheckPeriod(clientFailureCheckPeriod).setRetryInterval(retryInterval).setRetryIntervalMultiplier(retryIntervalMultiplier).setInitialConnectAttempts(initialConnectAttempts).setReconnectAttempts(reconnectAttempts).setUseDuplicateDetection(useDuplicateDetection).setConfirmationWindowSize(confirmationWindowSize).setHA(ha).setUser(user).setPassword(password); if (useDiscoveryGroup) { config.setDiscoveryGroupName(staticConnectorsOrDiscoveryGroup); } else { config.setStaticConnectors(ListUtil.toList(staticConnectorsOrDiscoveryGroup)); } server.deployBridge(config); } finally { blockOnIO(); } } @Override public void destroyBridge(final String name) throws Exception { checkStarted(); clearIO(); try { server.destroyBridge(name); } finally { blockOnIO(); } } @Override public void createConnectorService(final String name, final String factoryClass, final Map<String, Object> parameters) throws Exception { checkStarted(); clearIO(); try { final ConnectorServiceConfiguration config = new ConnectorServiceConfiguration().setName(name).setFactoryClassName(factoryClass).setParams(parameters); ConnectorServiceFactory factory = server.getServiceRegistry().getConnectorService(config); server.getConnectorsService().createService(config, factory); } finally { blockOnIO(); } } @Override public void destroyConnectorService(final String name) throws Exception { checkStarted(); clearIO(); try { server.getConnectorsService().destroyService(name); } finally { blockOnIO(); } } @Override public String[] getConnectorServices() { checkStarted(); clearIO(); try { return server.getConnectorsService().getConnectors().keySet().toArray(new String[0]); } finally { blockOnIO(); } } @Override public void forceFailover() 
throws Exception { checkStarted(); clearIO(); Thread t = new Thread() { @Override public void run() { try { server.stop(true, true); } catch (Throwable e) { logger.warn(e.getMessage(), e); } } }; t.start(); } public void updateDuplicateIdCache(String address, Object[] ids) throws Exception { clearIO(); try { DuplicateIDCache duplicateIDCache = server.getPostOffice().getDuplicateIDCache(new SimpleString(address)); for (Object id : ids) { duplicateIDCache.addToCache(((String) id).getBytes(), null); } } finally { blockOnIO(); } } @Override public void scaleDown(String connector) throws Exception { checkStarted(); clearIO(); HAPolicy haPolicy = server.getHAPolicy(); if (haPolicy instanceof LiveOnlyPolicy) { LiveOnlyPolicy liveOnlyPolicy = (LiveOnlyPolicy) haPolicy; if (liveOnlyPolicy.getScaleDownPolicy() == null) { liveOnlyPolicy.setScaleDownPolicy(new ScaleDownPolicy()); } liveOnlyPolicy.getScaleDownPolicy().setEnabled(true); if (connector != null) { liveOnlyPolicy.getScaleDownPolicy().getConnectors().add(0, connector); } server.fail(true); } } @Override public String listNetworkTopology() throws Exception { checkStarted(); clearIO(); try { JsonArrayBuilder brokers = JsonLoader.createArrayBuilder(); ClusterManager clusterManager = server.getClusterManager(); if (clusterManager != null) { Set<ClusterConnection> clusterConnections = clusterManager.getClusterConnections(); for (ClusterConnection clusterConnection : clusterConnections) { Topology topology = clusterConnection.getTopology(); Collection<TopologyMemberImpl> members = topology.getMembers(); for (TopologyMemberImpl member : members) { JsonObjectBuilder obj = JsonLoader.createObjectBuilder(); TransportConfiguration live = member.getLive(); if (live != null) { obj.add("nodeID", member.getNodeId()).add("live", live.getParams().get("host") + ":" + live.getParams().get("port")); TransportConfiguration backup = member.getBackup(); if (backup != null) { obj.add("backup", backup.getParams().get("host") + ":" + 
backup.getParams().get("port")); } } brokers.add(obj); } } } return brokers.build().toString(); } finally { blockOnIO(); } } // NotificationEmitter implementation ---------------------------- @Override public void removeNotificationListener(final NotificationListener listener, final NotificationFilter filter, final Object handback) throws ListenerNotFoundException { clearIO(); try { broadcaster.removeNotificationListener(listener, filter, handback); } finally { blockOnIO(); } } @Override public void removeNotificationListener(final NotificationListener listener) throws ListenerNotFoundException { clearIO(); try { broadcaster.removeNotificationListener(listener); } finally { blockOnIO(); } } @Override public void addNotificationListener(final NotificationListener listener, final NotificationFilter filter, final Object handback) throws IllegalArgumentException { clearIO(); try { broadcaster.addNotificationListener(listener, filter, handback); } finally { blockOnIO(); } } @Override public MBeanNotificationInfo[] getNotificationInfo() { CoreNotificationType[] values = CoreNotificationType.values(); String[] names = new String[values.length]; for (int i = 0; i < values.length; i++) { names[i] = values[i].toString(); } return new MBeanNotificationInfo[]{new MBeanNotificationInfo(names, this.getClass().getName(), "Notifications emitted by a Core Server")}; } // Package protected --------------------------------------------- // Protected ----------------------------------------------------- // Private ------------------------------------------------------- private synchronized void setMessageCounterEnabled(final boolean enable) { if (isStarted()) { if (configuration.isMessageCounterEnabled() && !enable) { stopMessageCounters(); } else if (!configuration.isMessageCounterEnabled() && enable) { startMessageCounters(); } } configuration.setMessageCounterEnabled(enable); } private void startMessageCounters() { messageCounterManager.start(); } private void stopMessageCounters() 
{ messageCounterManager.stop(); messageCounterManager.resetAllCounters(); messageCounterManager.resetAllCounterHistories(); } @Override public long getConnectionTTLOverride() { return configuration.getConnectionTTLOverride(); } @Override public int getIDCacheSize() { return configuration.getIDCacheSize(); } @Override public String getLargeMessagesDirectory() { return configuration.getLargeMessagesDirectory(); } @Override public String getManagementAddress() { return configuration.getManagementAddress().toString(); } @Override public String getNodeID() { return server.getNodeID().toString(); } @Override public String getManagementNotificationAddress() { return configuration.getManagementNotificationAddress().toString(); } @Override public long getMessageExpiryScanPeriod() { return configuration.getMessageExpiryScanPeriod(); } @Override public long getMessageExpiryThreadPriority() { return configuration.getMessageExpiryThreadPriority(); } @Override public long getTransactionTimeout() { return configuration.getTransactionTimeout(); } @Override public long getTransactionTimeoutScanPeriod() { return configuration.getTransactionTimeoutScanPeriod(); } @Override public boolean isPersistDeliveryCountBeforeDelivery() { return configuration.isPersistDeliveryCountBeforeDelivery(); } @Override public boolean isPersistIDCache() { return configuration.isPersistIDCache(); } @Override public boolean isWildcardRoutingEnabled() { return configuration.isWildcardRoutingEnabled(); } @Override protected MBeanOperationInfo[] fillMBeanOperationInfo() { return MBeanInfoHelper.getMBeanOperationsInfo(ActiveMQServerControl.class); } @Override protected MBeanAttributeInfo[] fillMBeanAttributeInfo() { return MBeanInfoHelper.getMBeanAttributesInfo(ActiveMQServerControl.class); } private void checkStarted() { if (!server.isStarted()) { throw new IllegalStateException("Broker is not started. 
It can not be managed yet"); } } public String[] listTargetAddresses(final String sessionID) { ServerSession session = server.getSessionByID(sessionID); if (session != null) { return session.getTargetAddresses(); } return new String[0]; } @Override public void onNotification(org.apache.activemq.artemis.core.server.management.Notification notification) { if (!(notification.getType() instanceof CoreNotificationType)) return; CoreNotificationType type = (CoreNotificationType) notification.getType(); TypedProperties prop = notification.getProperties(); this.broadcaster.sendNotification(new Notification(type.toString(), this, notifSeq.incrementAndGet(), notification.toString())); } }
package ua.tef.BLOCK02.task_02.game.game_001;

/**
 * Shared numeric bounds for the guessing game.
 * <p>
 * NOTE(review): constants-only interfaces are generally discouraged (Effective Java
 * "constant interface" anti-pattern); kept as-is because implementers may rely on
 * inheriting these names.
 */
public interface GlobalValues {
    // Inclusive lower bound of the primary number range.
    int PRIMARY_MIN_BARRIER = 0;
    // Inclusive upper bound of the primary number range.
    int PRIMARY_MAX_BARRIER = 100;
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for license information. package com.microsoft.commondatamodel.objectmodel.utilities; import com.microsoft.commondatamodel.objectmodel.cdm.CdmTraitDefinition; /** * @deprecated This class is extremely likely to be removed in the public interface, and not meant * to be called externally at all. Please refrain from using it. */ @Deprecated public class ResolveContextScope { private CdmTraitDefinition currentTrait; private int currentParameter; /** * @deprecated This function is extremely likely to be removed in the public interface, and not meant * to be called externally at all. Please refrain from using it. */ @Deprecated public void setCurrentParameter(final int currentParameter) { this.currentParameter = currentParameter; } /** * @deprecated This function is extremely likely to be removed in the public interface, and not meant * to be called externally at all. Please refrain from using it. */ @Deprecated public CdmTraitDefinition getCurrentTrait() { return currentTrait; } /** * @deprecated This function is extremely likely to be removed in the public interface, and not meant * to be called externally at all. Please refrain from using it. */ @Deprecated public int getCurrentParameter() { return currentParameter; } /** * @deprecated This function is extremely likely to be removed in the public interface, and not meant * to be called externally at all. Please refrain from using it. */ @Deprecated public void setCurrentTrait(final CdmTraitDefinition currentTrait) { this.currentTrait = currentTrait; } }
/******************************************************************************* * Copyright 卫志强 QQ:598748873@qq.com Inc. All rights reserved. 开源地址:https://gitee.com/doc_wei01/skyeye-report ******************************************************************************/ package com.skyeye.entity; /** * * @ClassName: LayoutType * @Description: 报表布局类型 * @author: skyeye云系列--卫志强 * @date: 2021/5/17 21:12 * * @Copyright: 2021 https://gitee.com/doc_wei01/skyeye-report Inc. All rights reserved. * 注意:本内容具体规则请参照readme执行,地址:https://gitee.com/doc_wei01/skyeye-report/blob/master/README.md */ public enum LayoutType { /** * 横向布局 */ HORIZONTAL(1), /** * 纵向布局; */ VERTICAL(2); private final int value; LayoutType(final int value) { this.value = value; } public static LayoutType valueOf(final int arg) { for (LayoutType item : LayoutType.values()) { if(item.getValue() == arg){ return item; } } return HORIZONTAL; } public int getValue() { return this.value; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.sling.jobs.impl;

import com.google.common.collect.ImmutableMap;
import org.apache.sling.jobs.Job;
import org.apache.sling.jobs.JobUpdate;
import org.apache.sling.jobs.JobUpdateBuilder;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;

/**
 * Builder for {@link JobUpdate} instances, anchored either on a full {@link Job}
 * or on a bare job ID (the latter only supports abort/stop commands).
 */
public class JobUpdateBuilderImpl implements JobUpdateBuilder {

    private final String jobId;
    private final Job job;
    private JobUpdate.JobUpdateCommand command;
    private final ImmutableMap.Builder<String, Object> updateProperties = ImmutableMap.builder();

    /**
     * Create a JobUpdateBuilder from a job.
     * @param job the job.
     */
    public JobUpdateBuilderImpl(@Nonnull Job job) {
        this.job = job;
        this.jobId = null;
    }

    /**
     * Create a JobUpdateBuilder from a job ID only.
     * @param jobId the job ID.
     */
    public JobUpdateBuilderImpl(@Nonnull String jobId) {
        this.jobId = jobId;
        this.job = null;
    }

    /**
     * Set the JobUpdateCommand.
     * @param command the command.
     * @return this JobBuilder instance.
     */
    @Nonnull
    @Override
    public JobUpdateBuilder command(@Nonnull JobUpdate.JobUpdateCommand command) {
        this.command = command;
        return this;
    }

    /**
     * Set a property to update. A {@code null} value is translated into
     * {@code JobUpdate.JobPropertyAction.REMOVE}, which removes the property.
     * @param name the name of the property.
     * @param value the value of the property, which may be null.
     * @return this JobBuilder instance.
     */
    @Nonnull
    @Override
    public JobUpdateBuilder put(@Nonnull String name, @Nullable Object value) {
        this.updateProperties.put(name, value == null ? JobUpdate.JobPropertyAction.REMOVE : value);
        return this;
    }

    @Nonnull
    @Override
    public JobUpdateBuilder putAll(@Nonnull Map<String, Object> properties) {
        this.updateProperties.putAll(properties);
        return this;
    }

    /**
     * Build the JobUpdate.
     * @return the JobUpdate.
     * @throws IllegalStateException when only a job ID is known and the command
     *         is anything other than abort or stop.
     */
    @Nonnull
    @Override
    public JobUpdate build() {
        if (job == null) {
            // Without the full Job, only terminal commands can be expressed.
            boolean terminal = command == JobUpdate.JobUpdateCommand.ABORT_JOB
                    || command == JobUpdate.JobUpdateCommand.STOP_JOB;
            if (!terminal) {
                throw new IllegalStateException("Only possible to abort or stop a job by ID alone ");
            }
            return new JobUpdateImpl(jobId, command);
        }
        return new JobUpdateImpl(job, command, updateProperties.build());
    }
}
package testbank; public class ReverseWordsInAString_151 { public String reverseWords(String s) { int length = s.length(); if (length == 0) { return s; } String sum = ""; // 双指针定位 int start = 0; int end = start; while (start<length) { while(start<length && s.charAt(start)==' '){ start++; } end =start; while (start<length && end<length && s.charAt(end)!=' '){ end++; } if (start < length) { String sub_str = s.substring(start,end); sum = " "+ sub_str+sum; } start = end; } return sum.trim(); } public static void main(String[] args) { ReverseWordsInAString_151 test = new ReverseWordsInAString_151(); System.out.println(test.reverseWords("the sky is blue")); } }
/* ### * IP: GHIDRA * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ghidra.app.util.bin.format.pdb2.pdbreader.symbol; import ghidra.app.util.bin.format.pdb2.pdbreader.*; /** * This class represents various flavors of Register Relative Address symbol. * <P> * Note: we do not necessarily understand each of these symbol type classes. Refer to the * base class for more information. */ public abstract class AbstractRegisterRelativeAddressMsSymbol extends AbstractMsSymbol implements NameMsSymbol { protected long offset; protected RecordNumber typeRecordNumber; protected int registerIndex; protected RegisterName registerName; protected String name; /** * Constructor for this symbol. * @param pdb {@link AbstractPdb} to which this symbol belongs. * @param reader {@link PdbByteReader} from which this symbol is deserialized. * @throws PdbException upon error parsing a field. */ public AbstractRegisterRelativeAddressMsSymbol(AbstractPdb pdb, PdbByteReader reader) throws PdbException { super(pdb, reader); } /** * Returns the offset. * @return Offset. */ public long getOffset() { return offset; } /** * Returns the type record number. * @return Type record number. */ public RecordNumber getTypeRecordNumber() { return typeRecordNumber; } /** * Returns the register index. * @return Register index. */ public int getRegisterIndex() { return registerIndex; } /** * Returns the register name. * @return Register name. 
*/ public String getRegisterNameString() { return registerName.toString(); } /** * Returns the name. * @return Name. */ @Override public String getName() { return name; } @Override public void emit(StringBuilder builder) { builder.append(String.format("%s: %s+%08X, Type: %s, %s", getSymbolTypeName(), registerName.toString(), offset, pdb.getTypeRecord(typeRecordNumber), name)); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.tools;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemTestHelper;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.junit.Before;
import org.junit.Test;

/**
 * Unit test for {@code DelegationTokenFetcher} using a mocked
 * {@link DistributedFileSystem} in place of a real NameNode.
 */
public class TestDelegationTokenFetcher {
  private DistributedFileSystem dfs;    // mocked DFS standing in for the NameNode
  private Configuration conf;
  private URI uri;                      // "hdfs://localhost:2005" — never actually contacted
  private static final String SERVICE_VALUE = "localhost:2005";
  private static String tokenFile = "file.dta";   // local file the fetcher writes tokens to

  /**
   * Registers the mocked DFS under the test URI so that FileSystem.get()
   * inside DelegationTokenFetcher resolves to the mock rather than a real cluster.
   */
  @Before
  public void init() throws URISyntaxException, IOException {
    dfs = mock(DistributedFileSystem.class);
    conf = new Configuration();
    uri = new URI("hdfs://" + SERVICE_VALUE);
    FileSystemTestHelper.addFileSystemForTesting(uri, conf, dfs);
  }

  /**
   * Verify that when the DelegationTokenFetcher runs, it talks to the Namenode,
   * pulls out the correct user's token and successfully serializes it to disk.
   */
  @Test
  public void expectedTokenIsRetrievedFromDFS() throws Exception {
    final byte[] ident = new DelegationTokenIdentifier(new Text("owner"), new Text("renewer"),
        new Text("realuser")).getBytes();
    final byte[] pw = new byte[] { 42 };
    final Text kind = new Text("MY-KIND");
    final Text service = new Text(uri.toString());

    // Create a token for the fetcher to fetch, wire NN to return it when asked
    // for this particular user.
    Token<DelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>(
        ident, pw, kind, service);
    when(dfs.getDelegationToken((String) null)).thenReturn(t);
    when(dfs.renewDelegationToken(eq(t))).thenReturn(1000L);
    when(dfs.getUri()).thenReturn(uri);
    FileSystem fileSys = FileSystem.getLocal(conf);
    try {
      // Fetch: should write exactly one token (the mock's) into the local token file.
      DelegationTokenFetcher.main(new String[] { "-fs", uri.toString(), tokenFile });
      Path p = new Path(fileSys.getWorkingDirectory(), tokenFile);
      Credentials creds = Credentials.readTokenStorageFile(p, conf);
      Iterator<Token<?>> itr = creds.getAllTokens().iterator();
      // make sure we got back exactly the 1 token we expected
      assertTrue(itr.hasNext());
      assertEquals(t, itr.next());
      assertTrue(!itr.hasNext());

      // Exercise the remaining subcommands against the same stored token file;
      // side effects are checked below via Mockito verify().
      DelegationTokenFetcher.main(new String[] { "-fs", uri.toString(), "--print", tokenFile });
      DelegationTokenFetcher.main(new String[] { "-fs", uri.toString(), "--renew", tokenFile });
      DelegationTokenFetcher.main(new String[] { "-fs", uri.toString(), "--cancel", tokenFile });
      // Renew and cancel must each have been forwarded to the (mock) NameNode once.
      verify(dfs).renewDelegationToken(eq(t));
      verify(dfs).cancelDelegationToken(eq(t));
    } finally {
      // Clean up the token file regardless of assertion outcome.
      fileSys.delete(new Path(tokenFile), true);
    }
  }
}
/*********************************************************************************
 *
 * Copyright 2014 BOUSSEJRA Malik Olivier, HALDEBIQUE Geoffroy, ROYER Johan
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ********************************************************************************/
package functions.excels;

import java.text.ParseException;
import java.util.Calendar;

import javax.persistence.PersistenceException;

import models.Commune;
import models.Espece;
import models.EspeceSynonyme;
import models.Fiche;
import models.FicheHasMembre;
import models.InformationsComplementaires;
import models.Membre;
import models.Observation;
import models.StadeSexe;
import models.UTMS;

import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;

import functions.DateUtil;

/**
 * Validates one Excel row of an "edit observations" import and, when valid,
 * persists the changes. Usage is two-phase: call {@link #checkRow()} first
 * (errors accumulate in the shared report), then {@link #saveToDatabase()}.
 */
public class RowCheckEdit {

	private Row row;                      // POI row being validated
	private int rowNumber;                // 0-based index, reported 1-based in errors
	private StringBuilder errorReport;    // shared, errors from all rows are appended here
	private boolean noError = true;       // flips to false on the first error
	private Calendar creationTime;        // timestamp stamped on modified observations

	// Values parsed out of the row by checkRow(); null/empty means "absent or invalid".
	private InformationsComplementaires complement = null;
	private Observation observation = null;
	private UTMS utm = null;
	private String lieu_dit = "";
	private Commune commune = null;
	private Calendar date_min = null;
	private Calendar date = null;
	private Espece espece = null;
	private Integer nombre = null;
	private StadeSexe stade_sexe = null;
	private Membre[] temoins;
	private String determinateur = "";
	private String memo = "";

	/**
	 * Creates a checker bound to one row of the spreadsheet.
	 * @param row the POI row to validate
	 * @param rowNumber 0-based row index (error messages show rowNumber+1)
	 * @param errorReport shared buffer that receives HTML-formatted error lines
	 * @param creationTime timestamp applied as last-modification/validation date on save
	 */
	public RowCheckEdit(Row row, int rowNumber, StringBuilder errorReport, Calendar creationTime){
		this.row = row;
		this.rowNumber=rowNumber;
		this.errorReport=errorReport;
		this.creationTime=creationTime;
	}

	/**
	 * Validates every cell of the row, resolving references (UTM square, commune,
	 * species, members...) against the database and recording each problem via
	 * {@link #addError(String)}. Column layout (0-based): 0=complement ID,
	 * 1=observation ID, 3=UTM, 4=locality, 5=commune, 6=min date, 7=date,
	 * 8=species, 9=count, 10=stage/sex, 11=witnesses, 12=determiner, 13=memo.
	 * NOTE(review): column 2 is never read here — presumably ignored on purpose; confirm.
	 */
	public void checkRow(){
		// Complementary-information ID (column 0)
		Cell cell = row.getCell(0);
		double info_id = -1;
		if(cell!=null && (info_id=cell.getNumericCellValue())>0)
			complement = InformationsComplementaires.find.byId((long) info_id);
		if(info_id!=0 && complement==null)
			addError("ID information complémentaire inexistante "+info_id);
		// Observation ID (column 1); only required when no complement was found
		cell = row.getCell(1);
		if(cell!=null && (info_id=cell.getNumericCellValue())>0)
			observation = Observation.find.byId((long) info_id);
		if(observation==null && complement==null)
			addError("ID observation inexistante "+info_id);
		// UTM grid square (column 3)
		cell = row.getCell(3);
		String utm_str = null;
		if(cell!=null){
			utm_str = cell.getStringCellValue();
			utm = UTMS.find.byId(utm_str);
		}
		if(utm==null)
			addError("Maille UTM inexistante : "+utm_str);
		// Locality, free text (column 4)
		cell = row.getCell(4);
		if(cell!=null)
			this.lieu_dit=cell.getStringCellValue();
		// Commune, resolved by approximate name match (column 5)
		cell = row.getCell(5);
		if(cell!=null){
			String commune_nom = cell.getStringCellValue();
			if(!commune_nom.isEmpty()){
				commune = Commune.findFromNomApproximatif(commune_nom);
				if(commune==null)
					addError("La commune '"+commune_nom+"' n'est pas référencée.");
			}
		}
		// Optional minimum date (column 6)
		cell = row.getCell(6);
		if(cell!=null){
			try{
				String date_min_str = cell.getStringCellValue();
				if(date_min_str!=null && !date_min_str.isEmpty()){
					date_min = DateUtil.toCalendarExcel(date_min_str);
				}
			}catch(ParseException e){
				addError("Date min invalide");
			}
		}
		// Mandatory observation date (column 7)
		cell = row.getCell(7);
		if(cell!=null){
			try{
				String date_str = cell.getStringCellValue();
				if(date_str!=null){
					date = DateUtil.toCalendarExcel(date_str);
				}
			}catch(ParseException e){
				addError("Date invalide");
			}
		}
		if(date==null)
			addError("La date est vide !");
		// Species (column 8): exact name first, then known synonyms
		cell = row.getCell(8);
		if(cell==null)
			addError("Pas d'espèce.");
		else{
			String espece_nom = cell.getStringCellValue();
			if((espece=Espece.find.where().eq("espece_nom", espece_nom).findUnique())==null){
				try{
					EspeceSynonyme syn = EspeceSynonyme.find.where().eq("synonyme_nom", espece_nom).findUnique();
					if(syn!=null)
						espece=syn.synonyme_espece;
					else
						addError("L'espèce "+espece_nom+" n'existe pas.");
				}catch(PersistenceException e){
					// findUnique() throws when two synonyms share the same name
					addError("Deux espèces synonymes ont le même nom : "+espece_nom);
				}
			}
		}
		// Specimen count (column 9); 0 is normalized to "unspecified" (null)
		cell = row.getCell(9);
		if(cell!=null){
			try{
				this.nombre = (int) cell.getNumericCellValue();
			}catch(IllegalStateException | NumberFormatException e){
				this.nombre = null;
			}
			this.nombre = (this.nombre!=null && this.nombre==0) ? null : nombre;
		}
		// Stage/sex (column 10); "oeuf"/"Oeuf"/"OEuf" is special-cased to ID 6 (egg)
		cell = row.getCell(10);
		if(cell!=null){
			String sexe = cell.getStringCellValue();
			if(sexe!=null && !sexe.equals("")){
				if(sexe.equals("oeuf") || sexe.equals("OEuf") || sexe.equals("Oeuf"))
					stade_sexe=StadeSexe.find.byId(6);
				else{
					if((stade_sexe=StadeSexe.find.where().eq("stade_sexe_intitule",sexe).findUnique())==null)
						addError("Le stade/sexe "+sexe+" n'existe pas.");
					// The stage/sex must also be valid for the species' group
					if(stade_sexe!=null && espece!=null && !espece.getGroupe().getStadesSexes().contains(stade_sexe)){
						addError("Le stade/sexe "+stade_sexe+" n'est pas valable pour le groupe "+espece.getGroupe());
					}
				}
			}
		}
		// Witnesses (column 11): comma-separated member names, each must exist
		cell = row.getCell(11);
		if(cell==null)
			addError("Témoin non spécifié.");
		else{
			String temoins_str=cell.getStringCellValue();
			if(temoins_str==null)
				addError("Témoin non spécifié.");
			else{
				String[] temoins_str_tab = temoins_str.split(",");
				temoins = new Membre[temoins_str_tab.length];
				for(int i = 0; i<temoins_str_tab.length; i++){
					temoins_str_tab[i]=temoins_str_tab[i].trim();
					temoins[i]=Membre.find.where().eq("membre_nom", temoins_str_tab[i]).findUnique();
					if(temoins[i]==null)
						addError("Le membre '"+temoins_str_tab[i]+"' n'est pas référencé.");
				}
			}
		}
		// Determiner, free text (column 12)
		cell = row.getCell(12);
		if(cell!=null)
			determinateur = cell.getStringCellValue();
		// Memo (column 13); must be a string cell
		cell = row.getCell(13);
		try{
			if(cell!=null)
				memo = cell.getStringCellValue();
		}catch(IllegalStateException e){
			addError("Le champ mémo n'est pas une chaîne de caractères");
		}
	}

	/**
	 * Records an error for this row (1-based line number) and marks the row invalid.
	 * @param s human-readable error message (appended with an HTML line break)
	 */
	public void addError(String s){
		noError=false;
		errorReport.append("Ligne "+(rowNumber+1)+": ");
		errorReport.append(s+"<br>");
	}

	/** @return true when no error has been recorded for this row */
	public boolean noError(){
		return noError;
	}

	/** @return the accumulated (shared) error report as a string */
	public String getErrors(){
		return errorReport.toString();
	}

	/**
	 * Persists the validated row. When a complement was resolved, its parent
	 * fiche and observation are updated in place (witness links are rebuilt);
	 * otherwise a new InformationsComplementaires is attached to the observation.
	 * Call only after {@link #checkRow()} reported no error.
	 * NOTE(review): the fiche/observation update block and the complement update
	 * block are both guarded by complement!=null, so for a plain observation only
	 * the new-complement branch runs — confirm this is the intended behavior.
	 */
	public void saveToDatabase() {
		Observation o;
		if(this.complement!=null)
			o = this.complement.informations_complementaires_observation;
		else
			o = this.observation;
		if(complement!=null){
			Fiche f = o.observation_fiche;
			f.fiche_utm=this.utm;
			f.fiche_lieudit=this.lieu_dit;
			f.fiche_commune=this.commune;
			f.fiche_date_min=this.date_min;
			f.fiche_date=this.date;
			f.fiche_memo=this.memo;
			f.update();
			o.observation_espece=this.espece;
			o.observation_determinateur=this.determinateur;
			o.observation_date_derniere_modification=this.creationTime;
			o.observation_date_validation=this.creationTime;
			o.update();
			// Rebuild the witness links from scratch
			for(FicheHasMembre fhm : FicheHasMembre.find.where().eq("fiche", f).findList())
				fhm.delete();
			for(Membre temoin : temoins){
				new FicheHasMembre(temoin,f).save();
			}
		}
		if(complement!=null){
			complement.informations_complementaires_nombre_de_specimens=this.nombre;
			complement.informations_complementaires_stade_sexe=this.stade_sexe;
			complement.update();
		}else{
			new InformationsComplementaires(o, nombre, stade_sexe).save();
		}
	}
}
package com.felhr.protocal; import com.felhr.utils.CRC16; import java.lang.reflect.Array; import java.util.Arrays; public class G3DCmdResult extends ProtocalCmd { public int m3D; public int mStatus; public G3DCmdResult(int cmd, byte[] para){ super(); mCmdValue = cmd; mParaLen = (int)para[0]; if(Array.getLength(para) == mParaLen + 1 && mParaLen > 0) { int index = para[mParaLen]; if(index > 0 && index < mParaLen ){ para[index] -= 1; } mCRC = ((para[mParaLen-1]<<8) & 0xFF00)| (para[mParaLen-2] &0xFF); int crc = 0; if(isCalcCRC()) { crc = CRC16.calc(Arrays.copyOfRange(para, 1, mParaLen - 2)); } if(mCRC == crc) { mSessionID = (int) (((para[2] << 8) & 0xFF00) | (para[1] & 0xFF)); //ret cmd para: len(1)+session_id(2)+ result(1) + 3d(1) + crc(2) + index(1) mStatus = (int) para[3]; m3D = (int) para[4]; } } else { mValid = false; } } }
/* * Copyright 2002-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.oxm.castor; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.Writer; import java.util.Map; import javax.xml.stream.XMLEventReader; import javax.xml.stream.XMLEventWriter; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; import org.exolab.castor.mapping.Mapping; import org.exolab.castor.mapping.MappingException; import org.exolab.castor.util.ObjectFactory; import org.exolab.castor.xml.IDResolver; import org.exolab.castor.xml.MarshalException; import org.exolab.castor.xml.Marshaller; import org.exolab.castor.xml.ResolverException; import org.exolab.castor.xml.UnmarshalHandler; import org.exolab.castor.xml.Unmarshaller; import org.exolab.castor.xml.ValidationException; import org.exolab.castor.xml.XMLClassDescriptorResolver; import org.exolab.castor.xml.XMLContext; import org.exolab.castor.xml.XMLException; import org.w3c.dom.Node; import org.xml.sax.ContentHandler; import org.xml.sax.EntityResolver; import org.xml.sax.InputSource; import org.xml.sax.SAXException; import org.xml.sax.XMLReader; import org.xml.sax.ext.LexicalHandler; import org.springframework.beans.factory.BeanClassLoaderAware; import org.springframework.beans.factory.InitializingBean; import org.springframework.core.io.Resource; import 
org.springframework.oxm.MarshallingFailureException;
import org.springframework.oxm.UncategorizedMappingException;
import org.springframework.oxm.UnmarshallingFailureException;
import org.springframework.oxm.ValidationFailureException;
import org.springframework.oxm.XmlMappingException;
import org.springframework.oxm.support.AbstractMarshaller;
import org.springframework.oxm.support.SaxResourceUtils;
import org.springframework.util.ObjectUtils;
import org.springframework.util.xml.DomUtils;
import org.springframework.util.xml.StaxUtils;

/**
 * Implementation of the {@code Marshaller} interface for Castor. By default, Castor does
 * not require any further configuration, though setting target classes, target packages or
 * providing a mapping file can be used to have more control over the behavior of Castor.
 *
 * <p>If a target class is specified using {@code setTargetClass}, the {@code CastorMarshaller}
 * can only be used to unmarshal XML that represents that specific class. If you want to unmarshal
 * multiple classes, you have to provide a mapping file using {@code setMappingLocations}.
 *
 * <p>Due to limitations of Castor's API, it is required to set the encoding used for
 * writing to output streams. It defaults to {@code UTF-8}.
 *
 * @author Arjen Poutsma
 * @author Jakub Narloch
 * @author Juergen Hoeller
 * @since 3.0
 * @see #setEncoding(String)
 * @see #setTargetClass(Class)
 * @see #setTargetPackages(String[])
 * @see #setMappingLocation(Resource)
 * @see #setMappingLocations(Resource[])
 */
public class CastorMarshaller extends AbstractMarshaller implements InitializingBean, BeanClassLoaderAware {

    /**
     * The default encoding used for stream access: UTF-8.
     */
    public static final String DEFAULT_ENCODING = "UTF-8";

    // Castor context configuration: mappings, target classes/packages.
    private Resource[] mappingLocations;

    private String encoding = DEFAULT_ENCODING;

    private Class[] targetClasses;

    private String[] targetPackages;

    private boolean validating = false;

    // Marshaller options (applied in customizeMarshaller).
    private boolean suppressNamespaces = false;

    private boolean suppressXsiType = false;

    private boolean marshalAsDocument = true;

    private boolean marshalExtendedType = true;

    private String rootElement;

    private String noNamespaceSchemaLocation;

    private String schemaLocation;

    private boolean useXSITypeAtRoot = false;

    // Unmarshaller options (applied in customizeUnmarshaller).
    private boolean whitespacePreserve = false;

    private boolean ignoreExtraAttributes = true;

    private boolean ignoreExtraElements = false;

    private Object rootObject;

    private boolean reuseObjects = false;

    private boolean clearCollections = false;

    // Shared Castor properties and per-aspect customization maps.
    private Map<String, String> castorProperties;

    private Map<String, String> doctypes;

    private Map<String, String> processingInstructions;

    private Map<String, String> namespaceMappings;

    private Map<String, String> namespaceToPackageMapping;

    // Pluggable resolution/creation strategies.
    private EntityResolver entityResolver;

    private XMLClassDescriptorResolver classDescriptorResolver;

    private IDResolver idResolver;

    private ObjectFactory objectFactory;

    private ClassLoader beanClassLoader;

    // Built once in afterPropertiesSet; source of all (un)marshallers.
    private XMLContext xmlContext;


    /**
     * Set the encoding to be used for stream access.
     * @see #DEFAULT_ENCODING
     */
    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    /**
     * Set the location of a single Castor XML mapping file.
     */
    public void setMappingLocation(Resource mappingLocation) {
        this.mappingLocations = new Resource[]{mappingLocation};
    }

    /**
     * Set the locations of the Castor XML mapping files.
     */
    public void setMappingLocations(Resource[] mappingLocations) {
        this.mappingLocations = mappingLocations;
    }

    /**
     * Set the Castor target class.
     * @see #setTargetPackage
     * @see #setMappingLocation
     */
    public void setTargetClass(Class targetClass) {
        this.targetClasses = new Class[]{targetClass};
    }

    /**
     * Set the Castor target classes.
     * @see #setTargetPackages
     * @see #setMappingLocations
     */
    public void setTargetClasses(Class[] targetClasses) {
        this.targetClasses = targetClasses;
    }

    /**
     * Set the name of a package with the Castor descriptor classes.
     */
    public void setTargetPackage(String targetPackage) {
        this.targetPackages = new String[] {targetPackage};
    }

    /**
     * Set the names of packages with the Castor descriptor classes.
     */
    public void setTargetPackages(String[] targetPackages) {
        this.targetPackages = targetPackages;
    }

    /**
     * Set whether this marshaller should validate in- and outgoing documents.
     * <p>Default is {@code false}.
     * @see Marshaller#setValidation(boolean)
     */
    public void setValidating(boolean validating) {
        this.validating = validating;
    }

    /**
     * Sets whether this marshaller should output namespaces.
     * <p>The default is {@code false}, i.e. namespaces are written.
     * @see org.exolab.castor.xml.Marshaller#setSuppressNamespaces(boolean)
     */
    public void setSuppressNamespaces(boolean suppressNamespaces) {
        this.suppressNamespaces = suppressNamespaces;
    }

    /**
     * Set whether this marshaller should output the {@code xsi:type} attribute.
     * <p>The default is {@code false}, i.e. the {@code xsi:type} is written.
     * @see org.exolab.castor.xml.Marshaller#setSuppressXSIType(boolean)
     */
    public void setSuppressXsiType(boolean suppressXsiType) {
        this.suppressXsiType = suppressXsiType;
    }

    /**
     * Set whether this marshaller should output the xml declaration.
     * <p>The default is {@code true}, the XML declaration will be written.
     * @see org.exolab.castor.xml.Marshaller#setMarshalAsDocument(boolean)
     */
    public void setMarshalAsDocument(boolean marshalAsDocument) {
        this.marshalAsDocument = marshalAsDocument;
    }

    /**
     * Set whether this marshaller should output for given type the {@code xsi:type} attribute.
     * <p>The default is {@code true}, the {@code xsi:type} attribute will be written.
     * @see org.exolab.castor.xml.Marshaller#setMarshalExtendedType(boolean)
     */
    public void setMarshalExtendedType(boolean marshalExtendedType) {
        this.marshalExtendedType = marshalExtendedType;
    }

    /**
     * Set the name of the root element.
     * @see org.exolab.castor.xml.Marshaller#setRootElement(String)
     */
    public void setRootElement(String rootElement) {
        this.rootElement = rootElement;
    }

    /**
     * Set the value of {@code xsi:noNamespaceSchemaLocation} attribute. When set, the
     * {@code xsi:noNamespaceSchemaLocation} attribute will be written for the root element.
     * @see org.exolab.castor.xml.Marshaller#setNoNamespaceSchemaLocation(String)
     */
    public void setNoNamespaceSchemaLocation(String noNamespaceSchemaLocation) {
        this.noNamespaceSchemaLocation = noNamespaceSchemaLocation;
    }

    /**
     * Set the value of {@code xsi:schemaLocation} attribute. When set, the
     * {@code xsi:schemaLocation} attribute will be written for the root element.
     * @see org.exolab.castor.xml.Marshaller#setSchemaLocation(String)
     */
    public void setSchemaLocation(String schemaLocation) {
        this.schemaLocation = schemaLocation;
    }

    /**
     * Sets whether this marshaller should output the {@code xsi:type} attribute for the root element.
     * This can be useful when the type of the element can not be simply determined from the element name.
     * <p>The default is {@code false}: The {@code xsi:type} attribute for the root element won't be written.
     * @see org.exolab.castor.xml.Marshaller#setUseXSITypeAtRoot(boolean)
     */
    public void setUseXSITypeAtRoot(boolean useXSITypeAtRoot) {
        this.useXSITypeAtRoot = useXSITypeAtRoot;
    }

    /**
     * Set whether the Castor {@link Unmarshaller} should preserve "ignorable" whitespace.
     * <p>Default is {@code false}.
     * @see org.exolab.castor.xml.Unmarshaller#setWhitespacePreserve(boolean)
     */
    public void setWhitespacePreserve(boolean whitespacePreserve) {
        this.whitespacePreserve = whitespacePreserve;
    }

    /**
     * Set whether the Castor {@link Unmarshaller} should ignore attributes that do not match a specific field.
     * <p>Default is {@code true}: Extra attributes are ignored.
     * @see org.exolab.castor.xml.Unmarshaller#setIgnoreExtraAttributes(boolean)
     */
    public void setIgnoreExtraAttributes(boolean ignoreExtraAttributes) {
        this.ignoreExtraAttributes = ignoreExtraAttributes;
    }

    /**
     * Set whether the Castor {@link Unmarshaller} should ignore elements that do not match a specific field.
     * <p>Default is {@code false}: Extra elements are flagged as an error.
     * @see org.exolab.castor.xml.Unmarshaller#setIgnoreExtraElements(boolean)
     */
    public void setIgnoreExtraElements(boolean ignoreExtraElements) {
        this.ignoreExtraElements = ignoreExtraElements;
    }

    /**
     * Set the expected root object for the unmarshaller, into which the source will be unmarshalled.
     * @see org.exolab.castor.xml.Unmarshaller#setObject(Object)
     * @deprecated in favor of {@link #setRootObject}
     */
    @Deprecated
    public void setObject(Object root) {
        this.rootObject = root;
    }

    /**
     * Set the expected root object for the unmarshaller, into which the source will be unmarshalled.
     * @see org.exolab.castor.xml.Unmarshaller#setObject(Object)
     */
    public void setRootObject(Object root) {
        this.rootObject = root;
    }

    /**
     * Set whether this unmarshaller should re-use objects.
     * This will be only used when unmarshalling to an existing object.
     * <p>The default is {@code false}, which means that the objects won't be re-used.
     * @see org.exolab.castor.xml.Unmarshaller#setReuseObjects(boolean)
     */
    public void setReuseObjects(boolean reuseObjects) {
        this.reuseObjects = reuseObjects;
    }

    /**
     * Sets whether this unmarshaller should clear collections upon the first use.
     * <p>The default is {@code false} which means that marshaller won't clear collections.
     * @see org.exolab.castor.xml.Unmarshaller#setClearCollections(boolean)
     */
    public void setClearCollections(boolean clearCollections) {
        this.clearCollections = clearCollections;
    }

    /**
     * Set Castor-specific properties for marshalling and unmarshalling.
     * Each entry key is considered the property name and each value the property value.
     * @see org.exolab.castor.xml.Marshaller#setProperty(String, String)
     * @see org.exolab.castor.xml.Unmarshaller#setProperty(String, String)
     */
    public void setCastorProperties(Map<String, String> castorProperties) {
        this.castorProperties = castorProperties;
    }

    /**
     * Set the map containing document type definition for the marshaller.
     * Each entry has system id as key and public id as value.
     * @see org.exolab.castor.xml.Marshaller#setDoctype(String, String)
     */
    public void setDoctypes(Map<String, String> doctypes) {
        this.doctypes = doctypes;
    }

    /**
     * Sets the processing instructions that will be used by during marshalling.
     * Keys are the processing targets and values contain the processing data.
     * @see org.exolab.castor.xml.Marshaller#addProcessingInstruction(String, String)
     */
    public void setProcessingInstructions(Map<String, String> processingInstructions) {
        this.processingInstructions = processingInstructions;
    }

    /**
     * Set the namespace mappings.
     * Property names are interpreted as namespace prefixes; values are namespace URIs.
     * @see org.exolab.castor.xml.Marshaller#setNamespaceMapping(String, String)
     */
    public void setNamespaceMappings(Map<String, String> namespaceMappings) {
        this.namespaceMappings = namespaceMappings;
    }

    /**
     * Set the namespace to package mappings. Property names are represents the namespaces URI, values are packages.
     * @see org.exolab.castor.xml.Marshaller#setNamespaceMapping(String, String)
     */
    public void setNamespaceToPackageMapping(Map<String, String> namespaceToPackageMapping) {
        this.namespaceToPackageMapping = namespaceToPackageMapping;
    }

    /**
     * Set the {@link EntityResolver} to be used during unmarshalling.
     * This resolver will be used to resolve system and public ids.
     * @see org.exolab.castor.xml.Unmarshaller#setEntityResolver(EntityResolver)
     */
    public void setEntityResolver(EntityResolver entityResolver) {
        this.entityResolver = entityResolver;
    }

    /**
     * Set the {@link XMLClassDescriptorResolver} to be used during unmarshalling.
     * This resolver will be used to resolve class descriptors.
     * @see org.exolab.castor.xml.Unmarshaller#setResolver(XMLClassDescriptorResolver)
     */
    public void setClassDescriptorResolver(XMLClassDescriptorResolver classDescriptorResolver) {
        this.classDescriptorResolver = classDescriptorResolver;
    }

    /**
     * Set the Castor {@link IDResolver} to be used during unmarshalling.
     * @see org.exolab.castor.xml.Unmarshaller#setIDResolver(IDResolver)
     */
    public void setIdResolver(IDResolver idResolver) {
        this.idResolver = idResolver;
    }

    /**
     * Set the Castor {@link ObjectFactory} to be used during unmarshalling.
     * @see org.exolab.castor.xml.Unmarshaller#setObjectFactory(ObjectFactory)
     */
    public void setObjectFactory(ObjectFactory objectFactory) {
        this.objectFactory = objectFactory;
    }

    /**
     * {@inheritDoc} — stored for later propagation to the Castor unmarshaller.
     */
    public void setBeanClassLoader(ClassLoader classLoader) {
        this.beanClassLoader = classLoader;
    }

    /**
     * Builds the shared {@code XMLContext} once all properties are set,
     * translating Castor's checked exceptions into {@code CastorMappingException}.
     */
    public void afterPropertiesSet() throws CastorMappingException, IOException {
        try {
            this.xmlContext = createXMLContext(this.mappingLocations, this.targetClasses, this.targetPackages);
        }
        catch (MappingException ex) {
            throw new CastorMappingException("Could not load Castor mapping", ex);
        }
        catch (ResolverException ex) {
            throw new CastorMappingException("Could not resolve Castor mapping", ex);
        }
    }

    /**
     * Create the Castor {@code XMLContext}. Subclasses can override this to create a custom context.
     * <p>The default implementation loads mapping files if defined, or the target class or packages if defined.
     * @return the created resolver
     * @throws MappingException when the mapping file cannot be loaded
     * @throws IOException in case of I/O errors
     * @see XMLContext#addMapping(org.exolab.castor.mapping.Mapping)
     * @see XMLContext#addClass(Class)
     */
    protected XMLContext createXMLContext(Resource[] mappingLocations, Class[] targetClasses,
            String[] targetPackages) throws MappingException, ResolverException, IOException {

        XMLContext context = new XMLContext();
        if (!ObjectUtils.isEmpty(mappingLocations)) {
            // All mapping files are merged into one Mapping before registration.
            Mapping mapping = new Mapping();
            for (Resource mappingLocation : mappingLocations) {
                mapping.loadMapping(SaxResourceUtils.createInputSource(mappingLocation));
            }
            context.addMapping(mapping);
        }
        if (!ObjectUtils.isEmpty(targetClasses)) {
            context.addClasses(targetClasses);
        }
        if (!ObjectUtils.isEmpty(targetPackages)) {
            context.addPackages(targetPackages);
        }
        if (this.castorProperties != null) {
            for (Map.Entry<String, String> property : this.castorProperties.entrySet()) {
                context.setProperty(property.getKey(), property.getValue());
            }
        }
        return context;
    }


    /**
     * Returns {@code true} for all classes, i.e. Castor supports arbitrary classes.
     */
    public boolean supports(Class<?> clazz) {
        return true;
    }


    // Marshalling

    @Override
    protected final void marshalDomNode(Object graph, Node node) throws XmlMappingException {
        // DOM output is routed through the SAX path via a DOM content handler.
        marshalSaxHandlers(graph, DomUtils.createContentHandler(node), null);
    }

    @Override
    protected final void marshalSaxHandlers(Object graph, ContentHandler contentHandler, LexicalHandler lexicalHandler)
            throws XmlMappingException {

        // Note: the lexicalHandler argument is intentionally unused — Castor's
        // Marshaller only accepts a ContentHandler.
        Marshaller marshaller = xmlContext.createMarshaller();
        marshaller.setContentHandler(contentHandler);
        marshal(graph, marshaller);
    }

    @Override
    protected final void marshalOutputStream(Object graph, OutputStream outputStream)
            throws XmlMappingException, IOException {

        // Castor requires a Writer; wrap the stream using the configured encoding.
        marshalWriter(graph, new OutputStreamWriter(outputStream, encoding));
    }

    @Override
    protected final void marshalWriter(Object graph, Writer writer) throws XmlMappingException, IOException {
        Marshaller marshaller = xmlContext.createMarshaller();
        marshaller.setWriter(writer);
        marshal(graph, marshaller);
    }

    @Override
    protected final void marshalXmlEventWriter(Object graph, XMLEventWriter eventWriter) throws XmlMappingException {
        marshalSaxHandlers(graph, StaxUtils.createContentHandler(eventWriter), null);
    }

    @Override
    protected final void marshalXmlStreamWriter(Object graph, XMLStreamWriter streamWriter) throws XmlMappingException {
        marshalSaxHandlers(graph, StaxUtils.createContentHandler(streamWriter), null);
    }

    /**
     * Applies user customizations, then marshals; Castor {@code XMLException}s
     * are converted into Spring's {@code XmlMappingException} hierarchy.
     */
    private void marshal(Object graph, Marshaller marshaller) {
        try {
            customizeMarshaller(marshaller);
            marshaller.marshal(graph);
        }
        catch (XMLException ex) {
            throw convertCastorException(ex, true);
        }
    }

    /**
     * Template method that allows for customizing of the given Castor {@link Marshaller}.
     */
    protected void customizeMarshaller(Marshaller marshaller) {
        // All configured flags are pushed unconditionally; null String values
        // (rootElement etc.) are passed straight through to Castor.
        marshaller.setValidation(this.validating);
        marshaller.setSuppressNamespaces(this.suppressNamespaces);
        marshaller.setSuppressXSIType(this.suppressXsiType);
        marshaller.setMarshalAsDocument(this.marshalAsDocument);
        marshaller.setMarshalExtendedType(this.marshalExtendedType);
        marshaller.setRootElement(this.rootElement);
        marshaller.setNoNamespaceSchemaLocation(this.noNamespaceSchemaLocation);
        marshaller.setSchemaLocation(this.schemaLocation);
        marshaller.setUseXSITypeAtRoot(this.useXSITypeAtRoot);
        if (this.doctypes != null) {
            for (Map.Entry<String, String> doctype : this.doctypes.entrySet()) {
                marshaller.setDoctype(doctype.getKey(), doctype.getValue());
            }
        }
        if (this.processingInstructions != null) {
            for (Map.Entry<String, String> processingInstruction : this.processingInstructions.entrySet()) {
                marshaller.addProcessingInstruction(processingInstruction.getKey(), processingInstruction.getValue());
            }
        }
        if (this.namespaceMappings != null) {
            for (Map.Entry<String, String> entry : this.namespaceMappings.entrySet()) {
                marshaller.setNamespaceMapping(entry.getKey(), entry.getValue());
            }
        }
    }


    // Unmarshalling

    @Override
    protected final Object unmarshalDomNode(Node node) throws XmlMappingException {
        try {
            return createUnmarshaller().unmarshal(node);
        }
        catch (XMLException ex) {
            throw convertCastorException(ex, false);
        }
    }

    @Override
    protected final Object unmarshalInputStream(InputStream inputStream) throws XmlMappingException, IOException {
        try {
            return createUnmarshaller().unmarshal(new InputSource(inputStream));
        }
        catch (XMLException ex) {
            throw convertCastorException(ex, false);
        }
    }

    @Override
    protected final Object unmarshalReader(Reader reader) throws XmlMappingException, IOException {
        try {
            return createUnmarshaller().unmarshal(new InputSource(reader));
        }
        catch (XMLException ex) {
            throw convertCastorException(ex, false);
        }
    }

    @Override
    protected final Object unmarshalSaxReader(XMLReader xmlReader, InputSource inputSource)
            throws XmlMappingException, IOException {

        // SAX path: drive the reader with Castor's UnmarshalHandler as sink,
        // then pull the resulting object out of the handler.
        UnmarshalHandler unmarshalHandler = createUnmarshaller().createHandler();
        try {
            ContentHandler contentHandler = Unmarshaller.getContentHandler(unmarshalHandler);
            xmlReader.setContentHandler(contentHandler);
            xmlReader.parse(inputSource);
            return unmarshalHandler.getObject();
        }
        catch (SAXException ex) {
            throw new UnmarshallingFailureException("SAX reader exception", ex);
        }
    }

    @Override
    protected final Object unmarshalXmlEventReader(XMLEventReader eventReader) {
        try {
            return createUnmarshaller().unmarshal(eventReader);
        }
        catch (XMLException ex) {
            throw convertCastorException(ex, false);
        }
    }

    @Override
    protected final Object unmarshalXmlStreamReader(XMLStreamReader streamReader) {
        try {
            return createUnmarshaller().unmarshal(streamReader);
        }
        catch (XMLException ex) {
            throw convertCastorException(ex, false);
        }
    }

    /**
     * Creates a fresh Unmarshaller from the shared context and applies the
     * configured customizations to it.
     */
    private Unmarshaller createUnmarshaller() {
        Unmarshaller unmarshaller = this.xmlContext.createUnmarshaller();
        customizeUnmarshaller(unmarshaller);
        return unmarshaller;
    }

    /**
     * Template method that allows for customizing of the given Castor {@link Unmarshaller}.
     */
    protected void customizeUnmarshaller(Unmarshaller unmarshaller) {
        // Flags and the (possibly null) root object are pushed unconditionally;
        // the pluggable strategies below are only set when configured.
        unmarshaller.setValidation(this.validating);
        unmarshaller.setWhitespacePreserve(this.whitespacePreserve);
        unmarshaller.setIgnoreExtraAttributes(this.ignoreExtraAttributes);
        unmarshaller.setIgnoreExtraElements(this.ignoreExtraElements);
        unmarshaller.setObject(this.rootObject);
        unmarshaller.setReuseObjects(this.reuseObjects);
        unmarshaller.setClearCollections(this.clearCollections);
        if (this.namespaceToPackageMapping != null) {
            for (Map.Entry<String, String> mapping : this.namespaceToPackageMapping.entrySet()) {
                unmarshaller.addNamespaceToPackageMapping(mapping.getKey(), mapping.getValue());
            }
        }
        if (this.entityResolver != null) {
            unmarshaller.setEntityResolver(this.entityResolver);
        }
        if (this.classDescriptorResolver != null) {
            unmarshaller.setResolver(this.classDescriptorResolver);
        }
        if (this.idResolver != null) {
            unmarshaller.setIDResolver(this.idResolver);
        }
        if (this.objectFactory != null) {
            unmarshaller.setObjectFactory(this.objectFactory);
        }
        if (this.beanClassLoader != null) {
            unmarshaller.setClassLoader(this.beanClassLoader);
        }
    }

    /**
     * Convert the given {@code XMLException} to an appropriate exception from the
     * {@code org.springframework.oxm} hierarchy.
     * <p>A boolean flag is used to indicate whether this exception occurs during marshalling or
     * unmarshalling, since Castor itself does not make this distinction in its exception hierarchy.
     * @param ex Castor {@code XMLException} that occurred
     * @param marshalling indicates whether the exception occurs during marshalling ({@code true}),
     * or unmarshalling ({@code false})
     * @return the corresponding {@code XmlMappingException}
     */
    protected XmlMappingException convertCastorException(XMLException ex, boolean marshalling) {
        if (ex instanceof ValidationException) {
            return new ValidationFailureException("Castor validation exception", ex);
        }
        else if (ex instanceof MarshalException) {
            if (marshalling) {
                return new MarshallingFailureException("Castor marshalling exception", ex);
            }
            else {
                return new UnmarshallingFailureException("Castor unmarshalling exception", ex);
            }
        }
        else {
            // fallback
            return new UncategorizedMappingException("Unknown Castor exception", ex);
        }
    }

}
/*
 * Copyright The Original Author or Authors
 * SPDX-License-Identifier: Apache-2.0
 */
package io.jenkins.plugins.opentelemetry.semconv;

/**
 * Java constants for the
 * [OpenTelemetry Environment variables conventions](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/protocol/exporter.md)
 * [OpenTelemetry Java SDK Autoconfigure](https://github.com/open-telemetry/opentelemetry-java/tree/main/sdk-extensions/autoconfigure)
 */
public class OTelEnvironmentVariablesConventions {

    public static final String OTEL_EXPORTER_OTLP_CERTIFICATE = "OTEL_EXPORTER_OTLP_CERTIFICATE";
    public static final String OTEL_EXPORTER_OTLP_ENDPOINT = "OTEL_EXPORTER_OTLP_ENDPOINT";
    public static final String OTEL_EXPORTER_OTLP_INSECURE = "OTEL_EXPORTER_OTLP_INSECURE";
    public static final String OTEL_EXPORTER_OTLP_TIMEOUT = "OTEL_EXPORTER_OTLP_TIMEOUT";
    public static final String OTEL_TRACES_EXPORTER = "OTEL_TRACES_EXPORTER";
    public static final String SPAN_ID = "SPAN_ID";
    public static final String TRACE_ID = "TRACE_ID";

    // Constants holder — not meant to be instantiated (Effective Java Item 4).
    private OTelEnvironmentVariablesConventions() {
    }
}
/**
 * Copyright (C) Posten Norge AS
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package no.digipost.http.client;

import org.apache.hc.core5.util.TimeValue;
import org.apache.hc.core5.util.Timeout;

/**
 * Eviction policy for the connections.
 */
public final class ConnectionEvictionPolicy {

    /**
     * No eviction at all.
     * NOTE(review): this sentinel is deliberately {@code null} — callers must
     * compare by reference/null-check rather than invoke methods on it.
     */
    public static final ConnectionEvictionPolicy NONE = null;

    /** Default policy: close connections idle for longer than 60 seconds. */
    public static final ConnectionEvictionPolicy DEFAULT = closeConnectionsIdleLongerThan(60);

    /**
     * The idle timeout before evicting the connection.
     */
    final TimeValue connectionsIdleLongerThanThreshold;

    private ConnectionEvictionPolicy(TimeValue closeIdleConnectionsAfter) {
        this.connectionsIdleLongerThanThreshold = closeIdleConnectionsAfter;
    }

    /**
     * Creates a policy that closes connections idle for longer than the given time.
     *
     * @param seconds negative to disable idle connection eviction
     */
    public static ConnectionEvictionPolicy closeConnectionsIdleLongerThan(int seconds) {
        Validation.equalOrGreater(seconds, -1, "Max idle time before connection is closed.");
        return new ConnectionEvictionPolicy(TimeValue.ofSeconds(seconds));
    }

    @Override
    public String toString() {
        return "ConnectionEvictionPolicy{" +
                "connectionsIdleLongerThanThreshold=" + connectionsIdleLongerThanThreshold +
                '}';
    }
}
package com.duo.examples.health.Normal;

import org.springframework.boot.actuate.health.AbstractHealthIndicator;
import org.springframework.boot.actuate.health.Health;
import org.springframework.stereotype.Component;

/**
 * Sample Spring Boot actuator health indicator that performs no real checks
 * and always reports the {@code UP} status.
 *
 * @author pythias
 * @since 2019-06-06
 */
@Component
public class OtherHealthIndicator extends AbstractHealthIndicator {

    /**
     * Performs the health check; this sample runs no checks and marks the
     * builder as up unconditionally.
     */
    @Override
    protected void doHealthCheck(Health.Builder builder) throws Exception {
        // Run the various checks here; none are needed for this example.
        builder.up();
    }
}
package com.netsuite.webservices.transactions.purchases.types;

import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB binding for the {@code VendorReturnAuthorizationOrderStatus} simple type.
 *
 * <p>Each constant corresponds to one {@code enumeration} facet of the schema
 * restriction on {@code xs:string} (values {@code _cancelled}, {@code _closed},
 * {@code _credited}, {@code _partiallyReturned}, {@code _pendingApproval},
 * {@code _pendingCredit}, {@code _pendingCreditPartiallyReturned},
 * {@code _pendingReturn}, {@code _undefined}).
 */
@XmlType(name = "VendorReturnAuthorizationOrderStatus", namespace = "urn:types.purchases_2014_2.transactions.webservices.netsuite.com")
@XmlEnum
public enum VendorReturnAuthorizationOrderStatus {

    @XmlEnumValue("_cancelled")
    CANCELLED("_cancelled"),
    @XmlEnumValue("_closed")
    CLOSED("_closed"),
    @XmlEnumValue("_credited")
    CREDITED("_credited"),
    @XmlEnumValue("_partiallyReturned")
    PARTIALLY_RETURNED("_partiallyReturned"),
    @XmlEnumValue("_pendingApproval")
    PENDING_APPROVAL("_pendingApproval"),
    @XmlEnumValue("_pendingCredit")
    PENDING_CREDIT("_pendingCredit"),
    @XmlEnumValue("_pendingCreditPartiallyReturned")
    PENDING_CREDIT_PARTIALLY_RETURNED("_pendingCreditPartiallyReturned"),
    @XmlEnumValue("_pendingReturn")
    PENDING_RETURN("_pendingReturn"),
    @XmlEnumValue("_undefined")
    UNDEFINED("_undefined");

    /** The literal XML value this constant serializes to. */
    private final String value;

    VendorReturnAuthorizationOrderStatus(String xmlValue) {
        this.value = xmlValue;
    }

    /** Returns the XML string value bound to this constant. */
    public String value() {
        return value;
    }

    /**
     * Resolves the enum constant bound to the given XML value.
     *
     * @throws IllegalArgumentException if no constant matches {@code v}
     */
    public static VendorReturnAuthorizationOrderStatus fromValue(String v) {
        for (VendorReturnAuthorizationOrderStatus candidate : values()) {
            if (candidate.value.equals(v)) {
                return candidate;
            }
        }
        throw new IllegalArgumentException(v);
    }
}
package com.api.shared;

import java.util.UUID;

import org.springframework.stereotype.Service;

/**
 * Small helper bean for producing opaque unique identifiers.
 */
@Service
public class Utils {

    /**
     * Generates a fresh random identifier.
     *
     * @return a random type-4 UUID rendered in its canonical string form
     */
    public String generateId() {
        UUID randomId = UUID.randomUUID();
        return randomId.toString();
    }

    /**
     * Generates an identifier for a new user.
     *
     * @return a fresh identifier, produced by {@link #generateId()}
     */
    public String generatedUserId() {
        return generateId();
    }
}
package io.automatiko.engine.addons.persistence.db; import static io.automatiko.engine.api.workflow.ProcessInstanceReadMode.MUTABLE; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.Optional; import java.util.stream.Collectors; import javax.persistence.OptimisticLockException; import org.hibernate.StaleObjectStateException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import io.automatiko.engine.addons.persistence.db.model.ProcessInstanceEntity; import io.automatiko.engine.api.auth.AccessDeniedException; import io.automatiko.engine.api.runtime.process.WorkflowProcessInstance; import io.automatiko.engine.api.workflow.ConflictingVersionException; import io.automatiko.engine.api.workflow.ExportedProcessInstance; import io.automatiko.engine.api.workflow.MutableProcessInstances; import io.automatiko.engine.api.workflow.Process; import io.automatiko.engine.api.workflow.ProcessInstance; import io.automatiko.engine.api.workflow.ProcessInstanceDuplicatedException; import io.automatiko.engine.api.workflow.ProcessInstanceReadMode; import io.automatiko.engine.api.workflow.encrypt.StoredDataCodec; import io.automatiko.engine.workflow.AbstractProcess; import io.automatiko.engine.workflow.AbstractProcessInstance; import io.automatiko.engine.workflow.base.core.context.variable.VariableScope; import io.automatiko.engine.workflow.base.instance.context.variable.VariableScopeInstance; import io.automatiko.engine.workflow.base.instance.impl.ProcessInstanceImpl; import io.automatiko.engine.workflow.marshalling.ProcessInstanceMarshaller; import io.quarkus.hibernate.orm.panache.runtime.JpaOperations; public class DatabaseProcessInstances implements MutableProcessInstances<ProcessInstanceEntity> { private static final Logger LOGGER = LoggerFactory.getLogger(DatabaseProcessInstances.class); private final Process<? 
extends ProcessInstanceEntity> process; private final ProcessInstanceMarshaller marshaller; private final StoredDataCodec codec; private Class<? extends ProcessInstanceEntity> type; public DatabaseProcessInstances(Process<? extends ProcessInstanceEntity> process, StoredDataCodec codec) { this.process = process; this.marshaller = new ProcessInstanceMarshaller(new JacksonObjectMarshallingStrategy()); this.codec = codec; this.type = process.createModel().getClass(); } @SuppressWarnings("unchecked") @Override public Optional<ProcessInstance<ProcessInstanceEntity>> findById(String id, int status, ProcessInstanceReadMode mode) { String resolvedId = resolveId(id); Optional<ProcessInstanceEntity> found = (Optional<ProcessInstanceEntity>) JpaOperations.INSTANCE.findByIdOptional(type, resolvedId); if (found.isEmpty()) { return Optional.empty(); } ProcessInstanceEntity entity = found.get(); if (entity.state == status) { return Optional.of(unmarshallInstance(mode, entity)); } else { return Optional.empty(); } } @Override public Collection<? extends ProcessInstance<ProcessInstanceEntity>> findByIdOrTag(ProcessInstanceReadMode mode, int status, String... values) { return JpaOperations.INSTANCE .stream(type, "state = ?1 and (id in (?2) or (?2) in elements(tags)) ", status, Arrays.asList(values)) .map(e -> { try { return unmarshallInstance(mode, ((ProcessInstanceEntity) e)); } catch (AccessDeniedException ex) { return null; } }) .filter(pi -> pi != null) .collect(Collectors.toSet()); } @Override public Collection<String> locateByIdOrTag(int status, String... 
values) { return JpaOperations.INSTANCE .stream(type, "state = ?1 and (id in (?2) or (?2) in elements(tags)) ", status, Arrays.asList(values)) .map(e -> { return ((ProcessInstanceEntity) e).entityId; }) .collect(Collectors.toSet()); } @Override public Collection<ProcessInstance<ProcessInstanceEntity>> values(ProcessInstanceReadMode mode, int status, int page, int size) { return JpaOperations.INSTANCE.find(type, "state = ?1 ", status).page(calculatePage(page, size), size) .stream() .map(e -> { try { return unmarshallInstance(mode, ((ProcessInstanceEntity) e)); } catch (AccessDeniedException ex) { return null; } }) .filter(pi -> pi != null) .collect(Collectors.toList()); } @Override public Long size() { return JpaOperations.INSTANCE.count(type); } @SuppressWarnings("unchecked") @Override public boolean exists(String id) { String resolvedId = resolveId(id); Optional<ProcessInstanceEntity> found = (Optional<ProcessInstanceEntity>) JpaOperations.INSTANCE.findByIdOptional(type, resolvedId); return found.isPresent(); } @Override public void create(String id, ProcessInstance<ProcessInstanceEntity> instance) { store(id, instance); } @Override public void update(String id, ProcessInstance<ProcessInstanceEntity> instance) { store(id, instance); } @Override public void remove(String id, ProcessInstance<ProcessInstanceEntity> instance) { ProcessInstanceEntity entity = instance.variables(); // run persist to make sure entities of the root are stored JpaOperations.INSTANCE.persist(entity); // then delete the root one JpaOperations.INSTANCE.deleteById(type, resolveId(id, instance)); } protected void store(String id, ProcessInstance<ProcessInstanceEntity> instance) { String resolvedId = resolveId(id, instance); if (isActive(instance)) { ProcessInstanceEntity entity = instance.variables(); byte[] data = codec.encode(marshaller.marhsallProcessInstance(instance)); if (data == null) { return; } entity.content = data; entity.entityId = resolvedId; entity.name = instance.description(); 
entity.businessKey = instance.businessKey(); entity.processId = instance.process().id(); entity.processName = instance.process().name(); entity.processVersion = instance.process().version(); entity.startDate = instance.startDate(); entity.state = instance.status(); entity.tags = new HashSet<>(instance.tags().values()); try { JpaOperations.INSTANCE.persist(entity); } catch (OptimisticLockException | StaleObjectStateException e) { throw new ConflictingVersionException("Process instance with id '" + instance.id() + "' has older version than tha stored one"); } finally { disconnect(instance); } } } protected void disconnect(ProcessInstance<ProcessInstanceEntity> instance) { ((AbstractProcessInstance<?>) instance).internalRemoveProcessInstance(() -> { try { ProcessInstanceEntity entity = (ProcessInstanceEntity) JpaOperations.INSTANCE.findById(type, resolveId(instance.id(), instance)); byte[] reloaded = codec.decode(entity.content); WorkflowProcessInstance wpi = marshaller.unmarshallWorkflowProcessInstance(reloaded, process); entity.toMap().forEach((k, v) -> { if (v != null) { v.toString(); VariableScopeInstance variableScopeInstance = (VariableScopeInstance) ((ProcessInstanceImpl) wpi) .getContextInstance(VariableScope.VARIABLE_SCOPE); variableScopeInstance.internalSetVariable(k, v); } }); return wpi; } catch (RuntimeException e) { LOGGER.error("Unexpected exception thrown when reloading process instance {}", instance.id(), e); return null; } }); } @SuppressWarnings("unchecked") protected ProcessInstance<ProcessInstanceEntity> unmarshallInstance(ProcessInstanceReadMode mode, ProcessInstanceEntity entity) { ProcessInstance<ProcessInstanceEntity> pi; if (mode == MUTABLE) { WorkflowProcessInstance wpi = marshaller.unmarshallWorkflowProcessInstance(codec.decode(entity.content), process); entity.toMap().forEach((k, v) -> { if (v != null) { v.toString(); VariableScopeInstance variableScopeInstance = (VariableScopeInstance) ((ProcessInstanceImpl) wpi) 
.getContextInstance(VariableScope.VARIABLE_SCOPE); variableScopeInstance.internalSetVariable(k, v); } }); pi = ((AbstractProcess<ProcessInstanceEntity>) process).createInstance(wpi, entity, entity.version); } else { WorkflowProcessInstance wpi = marshaller.unmarshallWorkflowProcessInstance(codec.decode(entity.content), process); entity.toMap().forEach((k, v) -> { if (v != null) { v.toString(); VariableScopeInstance variableScopeInstance = (VariableScopeInstance) ((ProcessInstanceImpl) wpi) .getContextInstance(VariableScope.VARIABLE_SCOPE); variableScopeInstance.internalSetVariable(k, v); } }); pi = ((AbstractProcess<ProcessInstanceEntity>) process).createReadOnlyInstance(wpi, entity); } return pi; } @Override public ExportedProcessInstance exportInstance(ProcessInstance<?> instance, boolean abort) { ExportedProcessInstance exported = marshaller.exportProcessInstance(instance); if (abort) { instance.abort(); } return exported; } @Override public ProcessInstance importInstance(ExportedProcessInstance instance, Process process) { ProcessInstance imported = marshaller.importProcessInstance(instance, process); if (exists(imported.id())) { throw new ProcessInstanceDuplicatedException(imported.id()); } create(imported.id(), imported); return imported; } }
/** * Copyright 2017 Pivotal Software, Inc. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.micrometer.spring.autoconfigure.export.appoptics; import io.micrometer.appoptics.AppOpticsConfig; import io.micrometer.appoptics.AppOpticsMeterRegistry; import io.micrometer.core.instrument.Clock; import io.micrometer.spring.autoconfigure.CompositeMeterRegistryAutoConfiguration; import io.micrometer.spring.autoconfigure.MetricsAutoConfiguration; import io.micrometer.spring.autoconfigure.export.StringToDurationConverter; import io.micrometer.spring.autoconfigure.export.simple.SimpleMetricsExportAutoConfiguration; import org.springframework.boot.autoconfigure.AutoConfigureAfter; import org.springframework.boot.autoconfigure.AutoConfigureBefore; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; /** * Configuration for exporting metrics to AppOptics. 
* * @author Hunter Sherman */ @Configuration @AutoConfigureBefore({CompositeMeterRegistryAutoConfiguration.class, SimpleMetricsExportAutoConfiguration.class}) @AutoConfigureAfter(MetricsAutoConfiguration.class) @ConditionalOnBean(Clock.class) @ConditionalOnClass(AppOpticsMeterRegistry.class) @ConditionalOnProperty(prefix = "management.metrics.export.appoptics", name = "enabled", havingValue = "true", matchIfMissing = true) @EnableConfigurationProperties(AppOpticsProperties.class) @Import(StringToDurationConverter.class) public class AppOpticsMetricsExportAutoConfiguration { @Bean @ConditionalOnMissingBean(AppOpticsConfig.class) public AppOpticsConfig appOpticsConfig(AppOpticsProperties appOpticsProperties) { return new AppOpticsPropertiesConfigAdapter(appOpticsProperties); } @Bean @ConditionalOnMissingBean public AppOpticsMeterRegistry appOpticsMeterRegistry(AppOpticsConfig config, Clock clock) { return new AppOpticsMeterRegistry(config, clock); } }
package com.atguigu.gmall.sms.service; import com.baomidou.mybatisplus.extension.service.IService; import com.atguigu.gmall.common.bean.PageResultVo; import com.atguigu.gmall.common.bean.PageParamVo; import com.atguigu.gmall.sms.entity.SeckillSkuEntity; import java.util.Map; /** * 秒杀活动商品关联 * * @author cbz * @email fengge@atguigu.com * @date 2021-11-30 21:18:41 */ public interface SeckillSkuService extends IService<SeckillSkuEntity> { PageResultVo queryPage(PageParamVo paramVo); }
/* * Copyright (c) 2015-present, Facebook, Inc. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.imagepipeline.datasource; import com.facebook.common.references.CloseableReference; import com.facebook.datasource.DataSource; import com.facebook.imagepipeline.listener.RequestListener; import com.facebook.imagepipeline.producers.Producer; import com.facebook.imagepipeline.producers.SettableProducerContext; import com.facebook.imagepipeline.systrace.FrescoSystrace; import javax.annotation.Nullable; import javax.annotation.concurrent.ThreadSafe; /** * DataSource<CloseableReference<T>> backed by a Producer<CloseableReference<T>> * * @param <T> */ @ThreadSafe public class CloseableProducerToDataSourceAdapter<T> extends AbstractProducerToDataSourceAdapter<CloseableReference<T>> { public static <T> DataSource<CloseableReference<T>> create( Producer<CloseableReference<T>> producer, SettableProducerContext settableProducerContext, RequestListener listener) { FrescoSystrace.beginSection("CloseableProducerToDataSourceAdapter#create"); CloseableProducerToDataSourceAdapter<T> result = new CloseableProducerToDataSourceAdapter<T>( producer, settableProducerContext, listener); FrescoSystrace.endSection(); return result; } private CloseableProducerToDataSourceAdapter( Producer<CloseableReference<T>> producer, SettableProducerContext settableProducerContext, RequestListener listener) { super(producer, settableProducerContext, listener); } @Override @Nullable public CloseableReference<T> getResult() { return CloseableReference.cloneOrNull(super.getResult()); } @Override protected void closeResult(CloseableReference<T> result) { CloseableReference.closeSafely(result); } @Override protected void onNewResultImpl(CloseableReference<T> result, int status) { super.onNewResultImpl(CloseableReference.cloneOrNull(result), status); } }
package org.incode.module.communications.dom.mixins; import java.io.IOException; import java.util.List; import java.util.Set; import javax.inject.Inject; import org.apache.isis.applib.ApplicationException; import org.apache.isis.applib.annotation.Action; import org.apache.isis.applib.annotation.ActionLayout; import org.apache.isis.applib.annotation.Contributed; import org.apache.isis.applib.annotation.Mixin; import org.apache.isis.applib.annotation.Optionality; import org.apache.isis.applib.annotation.Parameter; import org.apache.isis.applib.annotation.ParameterLayout; import org.apache.isis.applib.annotation.SemanticsOf; import org.apache.isis.applib.services.background.BackgroundService2; import org.apache.isis.applib.services.email.EmailService; import org.apache.isis.applib.services.factory.FactoryService; import org.apache.isis.applib.services.queryresultscache.QueryResultsCache; import org.apache.isis.applib.services.xactn.TransactionService; import org.incode.module.communications.dom.impl.commchannel.CommunicationChannel; import org.incode.module.communications.dom.impl.commchannel.EmailAddress; import org.incode.module.communications.dom.impl.comms.Communication; import org.incode.module.communications.dom.impl.comms.CommunicationRepository; import org.incode.module.communications.dom.spi.CommHeaderForEmail; import org.incode.module.communications.dom.spi.DocumentCommunicationSupport; import org.incode.module.document.DocumentModule; import org.incode.module.document.dom.impl.docs.Document; import org.incode.module.document.dom.impl.docs.DocumentState; import org.incode.module.document.dom.impl.docs.DocumentTemplate; import org.incode.module.document.dom.impl.docs.DocumentTemplateRepository; import org.incode.module.document.dom.impl.paperclips.PaperclipRepository; import org.incode.module.document.dom.impl.types.DocumentType; import org.incode.module.document.dom.services.DocumentCreatorService; /** * Provides the ability to send an email. 
 *
 * <p>Contributed to {@link Document} as the {@code act} action. Builds a cover-note
 * document from a template, creates an email {@link Communication} with the chosen
 * correspondents, attaches the cover note, the primary document and any of its
 * existing attachments, then schedules the email for sending.
 */
@Mixin(method = "act")
public class Document_sendByEmail {

    // the "primary" document being sent (the mixee)
    private final Document document;

    public Document_sendByEmail(final Document document) {
        this.document = document;
    }

    public static class ActionDomainEvent extends DocumentModule.ActionDomainEvent<Document_sendByEmail> { }

    /**
     * Sends this document by email to the selected address, with optional cc/bcc
     * addresses.
     *
     * @param toChannel primary recipient (chosen from {@link #choices0Act()})
     * @param cc        optional cc address (free text, validated by regex)
     * @param cc2       optional second cc address
     * @param cc3       optional third cc address
     * @param bcc       optional bcc address
     * @param bcc2      optional second bcc address
     * @return the scheduled {@link Communication}
     * @throws IOException propagated from cover-note creation/rendering
     */
    @Action(
            semantics = SemanticsOf.NON_IDEMPOTENT,
            domainEvent = ActionDomainEvent.class
    )
    @ActionLayout(
            cssClassFa = "at",
            contributed = Contributed.AS_ACTION
    )
    public Communication act(
            @ParameterLayout(named = "to:")
            final EmailAddress toChannel,
            @Parameter(
                    optionality = Optionality.OPTIONAL,
                    maxLength = CommunicationChannel.EmailType.Meta.MAX_LEN,
                    regexPattern = CommunicationChannel.EmailType.Meta.REGEX,
                    regexPatternReplacement = CommunicationChannel.EmailType.Meta.REGEX_DESC)
            @ParameterLayout(named = "cc:")
            final String cc,
            @Parameter(
                    optionality = Optionality.OPTIONAL,
                    maxLength = CommunicationChannel.EmailType.Meta.MAX_LEN,
                    regexPattern = CommunicationChannel.EmailType.Meta.REGEX,
                    regexPatternReplacement = CommunicationChannel.EmailType.Meta.REGEX_DESC)
            @ParameterLayout(named = "cc (2):")
            final String cc2,
            @Parameter(
                    optionality = Optionality.OPTIONAL,
                    maxLength = CommunicationChannel.EmailType.Meta.MAX_LEN,
                    regexPattern = CommunicationChannel.EmailType.Meta.REGEX,
                    regexPatternReplacement = CommunicationChannel.EmailType.Meta.REGEX_DESC)
            @ParameterLayout(named = "cc (3):")
            final String cc3,
            @Parameter(
                    optionality = Optionality.OPTIONAL,
                    maxLength = CommunicationChannel.EmailType.Meta.MAX_LEN,
                    regexPattern = CommunicationChannel.EmailType.Meta.REGEX,
                    regexPatternReplacement = CommunicationChannel.EmailType.Meta.REGEX_DESC)
            @ParameterLayout(named = "bcc:")
            final String bcc,
            @Parameter(
                    optionality = Optionality.OPTIONAL,
                    maxLength = CommunicationChannel.EmailType.Meta.MAX_LEN,
                    regexPattern = CommunicationChannel.EmailType.Meta.REGEX,
                    regexPatternReplacement = CommunicationChannel.EmailType.Meta.REGEX_DESC)
            @ParameterLayout(named = "bcc (2):")
            final String bcc2
    ) throws IOException {

        if(this.document.getState() == DocumentState.NOT_RENDERED) {
            // this shouldn't happen, but want to fail-fast in case a future programmer calls this directly
            throw new IllegalArgumentException("Document is not yet rendered");
        }

        // create cover note
        //
        // nb: there is a presumption is that the cover note will not be automatically attached to any other objects,
        // ie its AttachmentAdvisor should be AttachToNone.
        final DocumentTemplate coverNoteTemplate = determineEmailCoverNoteTemplate();
        final Document coverNoteDoc =
                documentCreatorService.createDocumentAndAttachPaperclips(this.document, coverNoteTemplate);
        coverNoteDoc.render(coverNoteTemplate, this.document);

        // create comm and correspondents; the cover note's name (minus ".html") becomes the subject
        final String atPath = document.getAtPath();
        final String subject = stripFileExtensionIfAny(coverNoteDoc.getName());
        final Communication communication =
                communicationRepository.createEmail(subject, atPath, toChannel, cc, cc2, cc3, bcc, bcc2);
        // flush so the communication is persisted before paperclips reference it
        transactionService.flushTransaction();

        // manually attach the cover note to the comm
        paperclipRepository.attach(coverNoteDoc, DocumentConstants.PAPERCLIP_ROLE_COVER, communication);

        // also attach this "primary" document to the comm
        paperclipRepository.attach(this.document, DocumentConstants.PAPERCLIP_ROLE_PRIMARY, communication);

        // also copy over as attachments to the comm anything else also attached to primary document
        final List<Document> communicationAttachments = attachmentProvider.attachmentsFor(document);
        for (Document communicationAttachment : communicationAttachments) {
            paperclipRepository.attach(communicationAttachment, DocumentConstants.PAPERCLIP_ROLE_ATTACHMENT,
                    communication);
        }
        transactionService.flushTransaction();

        // finally, schedule the email to be sent
        communication.scheduleSend();

        return communication;
    }

    /**
     * Veto: returns a reason string when the action should be disabled, or
     * {@code null} when the email can be sent.
     */
    public String disableAct() {
        if (emailService == null || !emailService.isConfigured()) {
            return "Email service not configured";
        }
        if (document.getState() != DocumentState.RENDERED) {
            return "Document not yet rendered";
        }
        if(determineEmailCoverNoteTemplateElseNull() == null) {
            return "Email cover note type/template not provided";
        }
        if(determineEmailHeader().getDisabledReason() != null) {
            return determineEmailHeader().getDisabledReason();
        }
        if(choices0Act().isEmpty()) {
            // NOTE(review): "to sent to" is a typo for "to send to" in this user-visible
            // message — left untouched here as only comments may change in this pass.
            return "Could not locate any email address(es) to sent to";
        }
        return null;
    }

    /**
     * Default for the "to:" parameter: the header's preferred address, else the
     * first available choice, else none.
     */
    public EmailAddress default0Act() {
        final EmailAddress toDefault = determineEmailHeader().getToDefault();
        if (toDefault != null) {
            return toDefault;
        }
        final Set<EmailAddress> choices = choices0Act();
        return choices.isEmpty() ? null : choices.iterator().next();
    }

    /** Choices for the "to:" parameter, as inferred by the SPI support services. */
    public Set<EmailAddress> choices0Act() {
        return determineEmailHeader().getToChoices();
    }

    /** Default for the first "cc:" parameter. */
    public String default1Act() {
        return determineEmailHeader().getCc();
    }

    /** Default for the first "bcc:" parameter (parameter index 4). */
    public String default4Act() {
        return determineEmailHeader().getBcc();
    }

    /** As {@link #determineEmailCoverNoteTemplateElseNull()}, but failing fast when absent. */
    private DocumentTemplate determineEmailCoverNoteTemplate() {
        DocumentTemplate template = determineEmailCoverNoteTemplateElseNull();
        if(template == null) {
            throw new ApplicationException("Could not locate an email cover note template.");
        }
        return template;
    }

    /** Finds the cover-note template applicable to this document's atPath, if any. */
    private DocumentTemplate determineEmailCoverNoteTemplateElseNull() {
        final DocumentType coverNoteDocumentType = determineEmailCoverNoteDocumentType();
        if(coverNoteDocumentType == null) {
            return null;
        }
        return documentTemplateRepository.findFirstByTypeAndApplicableToAtPath(coverNoteDocumentType,
                document.getAtPath());
    }

    /**
     * Asks each registered {@link DocumentCommunicationSupport} for the cover-note
     * document type; the first non-null answer wins. Result is cached per request.
     */
    private DocumentType determineEmailCoverNoteDocumentType() {
        final DocumentType docType = queryResultsCache.execute(() -> {
            if (documentCommunicationSupports != null) {
                for (DocumentCommunicationSupport supportService : documentCommunicationSupports) {
                    final DocumentType documentType = supportService.emailCoverNoteDocumentTypeFor(document);
                    if (documentType != null) {
                        return documentType;
                    }
                }
            }
            return null;
        }, Document_sendByEmail.class, "determineEmailCoverNoteDocumentType", document);
        return docType;
    }

    /**
     * Builds (and per-request caches) the email header — to/cc/bcc defaults and
     * choices — by letting each SPI support service contribute to it.
     */
    private CommHeaderForEmail determineEmailHeader() {
        return queryResultsCache.execute(() -> {
            final CommHeaderForEmail header = new CommHeaderForEmail();
            if(documentCommunicationSupports != null) {
                for (DocumentCommunicationSupport emailSupport : documentCommunicationSupports) {
                    emailSupport.inferEmailHeaderFor(document, header);
                }
                if(header.getToChoices().isEmpty()) {
                    header.setDisabledReason("Could not find a communication channel to use");
                }
            }
            return header;
        }, Document_sendByEmail.class, "determineEmailHeader", document);
    }

    // bit of a hack... strips a trailing ".html" (only) from the cover note name
    private static String stripFileExtensionIfAny(final String name) {
        final int suffix = name.lastIndexOf(".html");
        return suffix == -1 ? name : name.substring(0, suffix);
    }

    @Inject
    Document_communicationAttachments.Provider attachmentProvider;

    @Inject
    QueryResultsCache queryResultsCache;

    @Inject
    TransactionService transactionService;

    @Inject
    List<DocumentCommunicationSupport> documentCommunicationSupports;

    @Inject
    DocumentTemplateRepository documentTemplateRepository;

    @Inject
    CommunicationRepository communicationRepository;

    @Inject
    PaperclipRepository paperclipRepository;

    @Inject
    EmailService emailService;

    @Inject
    BackgroundService2 backgroundService;

    @Inject
    DocumentCreatorService documentCreatorService;

    @Inject
    FactoryService factoryService;

}
/* * Copyright (c) 2012 - 2015, Internet Corporation for Assigned Names and * Numbers (ICANN) and China Internet Network Information Center (CNNIC) * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the ICANN, CNNIC nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL ICANN OR CNNIC BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH * DAMAGE. 
*/ package org.restfulwhois.rdap.common.dao.impl; import java.util.ArrayList; import java.util.List; import javax.annotation.Resource; import org.restfulwhois.rdap.common.dao.ConformanceDao; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Repository; /** * conformance query DAO mainly select conformance information from * spring-initData-rdapConformance.xml. * <p> * these information will be valid after restart the system. * * @author tianhongqiang * */ @Repository public class ConformanceFromFileDaoImpl implements ConformanceDao { /** * logger. */ protected static final Logger LOGGER = LoggerFactory .getLogger(ConformanceFromFileDaoImpl.class); /** * rdapConformanceList. */ @Resource(name = "rdapConformanceList") private List<String> rdapConformanceList; /** * Query rdapConformance data information. * * @return List<String> resultList */ @Override public List<String> queryConformance() { List<String> resultList = new ArrayList<String>(); for (int i = 0; i < rdapConformanceList.size(); i++) { resultList.add(rdapConformanceList.get(i)); } return resultList; } }
package com.acme;

import com.acme.ircutils.MessageSplitter;
import com.acme.processors.IrcMessageProcessor;
import com.acme.processors.TelegramMessageProcessor;
import com.acme.telegramutils.TelegramUtils;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.irc.IrcMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

/**
 * Camel route definitions bridging an IRC channel and a Telegram group in both
 * directions. On IRC runtime failures the IRC route is restarted and the
 * exchange retried after a delay.
 */
@Component
public class MainRoutes extends RouteBuilder {

    private final Logger LOG = LoggerFactory.getLogger(getClass());

    private static final String FROM_IRC_ROUTE_ID = "from-irc";
    private static final String FROM_TELEGRAM_ROUTE_ID = "from-telegram";

    private final String ircUri;
    private final String telegramUri;
    private final TelegramUtils telegramUtils;
    private final MessageSplitter messageSplitter;
    // maps IRC channels <-> Telegram groups for the processors
    private final ChannelGroupMapper channelGroupMappings;

    @Autowired
    public MainRoutes(TelegramUtils telegramUtils,
                      MessageSplitter messageSplitter,
                      ChannelGroupMapper channelGroupMappings,
                      @Value("${irc.uri}") String ircUri,
                      @Value("${telegram.uri}") String telegramUri) {
        this.telegramUtils = telegramUtils;
        this.messageSplitter = messageSplitter;
        this.channelGroupMappings = channelGroupMappings;
        this.ircUri = ircUri;
        this.telegramUri = telegramUri;
    }

    @Override
    public void configure() throws Exception {

        // Global error handling: on a RuntimeCamelException, preserve the exchange
        // body/headers, bounce the IRC route (stop then start), then delay 2 minutes
        // before continuing.
        onException(RuntimeCamelException.class)
                .process(exchange -> {
                    exchange.getOut().setBody(exchange.getIn().getBody());
                    exchange.getOut().setHeaders(exchange.getIn().getHeaders());
                    LOG.info("Stopping route {}", FROM_IRC_ROUTE_ID);
                    exchange.getContext().stopRoute(FROM_IRC_ROUTE_ID);
                    LOG.info("Starting route {}", FROM_IRC_ROUTE_ID);
                    exchange.getContext().startRoute(FROM_IRC_ROUTE_ID);
                })
                .delay(1000 * 60 * 2); // 2 minutes

        // IRC -> Telegram: only PRIVMSG messages are forwarded; everything else is
        // just logged (best-effort — parse failures are logged and swallowed).
        from(ircUri)
                .routeId(FROM_IRC_ROUTE_ID)
                .choice()
                .when(header("irc.messageType").isEqualToIgnoreCase("PRIVMSG"))
                .process(new IrcMessageProcessor(channelGroupMappings))
                .to(telegramUri)
                .log("[IRC] -> Telegram delivered")
                .otherwise()
                .process(exchange -> {
                    exchange.getOut().setBody(exchange.getIn().getBody());
                    exchange.getOut().setHeaders(exchange.getIn().getHeaders());
                    try {
                        IrcMessage ircMsg = exchange.getOut().getBody(IrcMessage.class);
                        LOG.info("{} {}", ircMsg.getMessageType(), ircMsg.getMessage());
                    } catch (Exception e) {
                        LOG.warn("Couldn't parse IRC message: {}", e.getMessage());
                    }
                })
                .end();

        // Telegram -> IRC: each incoming message is converted, split into IRC-sized
        // chunks by the processor, and each chunk is delivered separately.
        from(telegramUri)
                .routeId(FROM_TELEGRAM_ROUTE_ID)
                .log("[Telegram] Incoming message")
                .process(new TelegramMessageProcessor(channelGroupMappings, telegramUtils, messageSplitter))
                .split(body())
                .to(ircUri)
                .log("[Telegram] -> IRC delivered");
    }
}
import greenfoot.*; // (World, Actor, GreenfootImage, Greenfoot and MouseInfo) /** * Write a description of class Portal here. * * @author (your name) * @version (a version number or a date) */ public class Portal extends Actor { /** * Act - do whatever the Portal wants to do. This method is called whenever * the 'Act' or 'Run' button gets pressed in the environment. */ public void act() { Actor player = getOneIntersectingObject(Player.class); if(player!=null){ int gold = ((SuperWorld)getWorld()).score.getValue(); int p1 = ((SuperWorld)getWorld()).powerCounter.getValue(); int p2 = ((SuperWorld)getWorld()).powerCounter2.getValue(); int screen = ((SuperWorld)getWorld()).screen; Greenfoot.playSound("portal.wav"); if(screen==9)Greenfoot.setWorld(new World10(gold, p1, p2, ++screen)); else if(screen==10)Greenfoot.setWorld(new World11(gold, p1, p2, ++screen)); else if(screen==11)Greenfoot.setWorld(new World12(gold, p1, p2, ++screen)); else if(screen==12)Greenfoot.setWorld(new GameOver()); } // Add your action code here. } }
/******************************************************************************* * Copyright (C) 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. ******************************************************************************/ package com.google.cloud.dataflow.sdk.runners.worker; import com.google.cloud.dataflow.sdk.coders.Coder; import com.google.cloud.dataflow.sdk.options.PipelineOptions; import com.google.cloud.dataflow.sdk.util.CloudObject; import com.google.cloud.dataflow.sdk.util.ExecutionContext; import com.google.cloud.dataflow.sdk.util.InstanceBuilder; import com.google.cloud.dataflow.sdk.util.Serializer; import com.google.cloud.dataflow.sdk.util.common.worker.Sink; import com.google.common.reflect.TypeToken; import java.util.HashMap; import java.util.Map; /** * Constructs a Sink from a Dataflow service protocol Sink definition. * * <p> A SinkFactory concrete "subclass" should define a method with the * following signature: * <pre> {@code * static SomeSinkSubclass<T> create(PipelineOptions, CloudObject, * Coder<T>, ExecutionContext); * } </pre> */ public final class SinkFactory { // Do not instantiate. private SinkFactory() {} /** * A map from the short names of predefined sinks to their full * factory class names. 
*/ static Map<String, String> predefinedSinkFactories = new HashMap<>(); static { predefinedSinkFactories.put("TextSink", TextSinkFactory.class.getName()); predefinedSinkFactories.put("AvroSink", AvroSinkFactory.class.getName()); predefinedSinkFactories.put("ShuffleSink", ShuffleSinkFactory.class.getName()); predefinedSinkFactories.put("PubsubSink", PubsubSink.class.getName()); predefinedSinkFactories.put("WindmillSink", WindmillSink.class.getName()); } /** * Creates a {@link Sink} from a Dataflow API Sink definition. * * @throws Exception if the sink could not be decoded and * constructed */ @SuppressWarnings("serial") public static <T> Sink<T> create( PipelineOptions options, com.google.api.services.dataflow.model.Sink cloudSink, ExecutionContext executionContext) throws Exception { Coder<T> coder = Serializer.deserialize(cloudSink.getCodec(), Coder.class); CloudObject object = CloudObject.fromSpec(cloudSink.getSpec()); String className = predefinedSinkFactories.get(object.getClassName()); if (className == null) { className = object.getClassName(); } try { return InstanceBuilder.ofType(new TypeToken<Sink<T>>() {}) .fromClassName(className) .fromFactoryMethod("create") .withArg(PipelineOptions.class, options) .withArg(CloudObject.class, object) .withArg(Coder.class, coder) .withArg(ExecutionContext.class, executionContext) .build(); } catch (ClassNotFoundException exn) { throw new Exception( "unable to create a sink from " + cloudSink, exn); } } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.cosmos; import java.util.HashMap; import java.util.Map; /** * Represents the consistency levels supported for Cosmos DB client operations in the Azure Cosmos DB database service. * <p> * The requested ConsistencyLevel must match or be weaker than that provisioned for the database account. Consistency * levels by order of strength are STRONG, BOUNDED_STALENESS, SESSION and EVENTUAL. */ public enum ConsistencyLevel { /** * STRONG Consistency guarantees that read operations always return the value that was last written. */ STRONG("Strong"), /** * Bounded Staleness guarantees that reads are not too out-of-date. This can be configured based on number of * operations (MaxStalenessPrefix) or time (MaxStalenessIntervalInSeconds) */ BOUNDED_STALENESS("BoundedStaleness"), /** * SESSION Consistency guarantees monotonic reads (you never read old data, then new, then old again), monotonic * writes (writes are ordered) and read your writes (your writes are immediately visible to your reads) within * any single session. */ SESSION("Session"), /** * EVENTUAL Consistency guarantees that reads will return a subset of writes. ALL writes will be eventually be * available for reads. */ EVENTUAL("Eventual"), /** * CONSISTENT_PREFIX Consistency guarantees that reads will return some prefix of all writes with no gaps. ALL * writes * will be eventually be available for reads. 
*/ CONSISTENT_PREFIX("ConsistentPrefix"); private static Map<String, ConsistencyLevel> consistencyLevelHashMap = new HashMap<>(); static { for (ConsistencyLevel cl : ConsistencyLevel.values()) { consistencyLevelHashMap.put(cl.toString(), cl); } } private final String overWireValue; ConsistencyLevel(String overWireValue) { this.overWireValue = overWireValue; } /** * Given the over wire version of ConsistencyLevel gives the corresponding enum or return null * * @param consistencyLevel String value of consistency level * @return ConsistencyLevel Enum consistency level */ public static ConsistencyLevel fromServiceSerializedFormat(String consistencyLevel) { // this is 100x faster than org.apache.commons.lang3.EnumUtils.getEnum(String) // for more detail refer to https://github.com/moderakh/azure-cosmosdb-benchmark return consistencyLevelHashMap.get(consistencyLevel); } @Override public String toString() { return this.overWireValue; } }
package de.lancom.systems.stomp.spring;

import de.lancom.systems.defer.Promise;
import de.lancom.systems.stomp.core.connection.StompFrameContext;

/**
 * Stomp producer interface.
 *
 * <p>Annotated {@link FunctionalInterface} since it declares exactly one
 * abstract method; this both documents the intent and lets the compiler
 * reject accidental additions of further abstract methods.
 *
 * @param <T> value type
 */
@FunctionalInterface
public interface StompProducer<T> {

    /**
     * Send stomp frame.
     *
     * @param value value to send as the frame payload
     * @return promise resolving to the stomp frame exchange context
     */
    Promise<StompFrameContext> send(T value);

}
package com.libbytian.pan.findmovie.aidianying;

import com.baomidou.mybatisplus.extension.service.IService;
import com.libbytian.pan.system.model.MovieNameAndUrlModel;
import org.springframework.cache.annotation.CacheConfig;
import org.springframework.cache.annotation.Cacheable;

import java.util.List;

/**
 * Service interface for looking up movie download/stream URLs from the
 * "aidianying" source. Results are cached (Spring Cache, cache name
 * "aidianying") keyed by the searched movie name.
 */
@CacheConfig(cacheNames = "aidianying")
public interface IFindMovieInAiDianYing extends IService<MovieNameAndUrlModel> {

    /**
     * Finds URL entries matching the given movie name.
     *
     * <p>Cached under the movie name; caching is skipped when the name is null
     * (see the {@code condition} SpEL expression).
     *
     * @param movieName movie title to search for
     * @return matching name/URL models
     * @throws Exception if the lookup fails (declared broadly by the implementation)
     */
    @Cacheable(key = "#movieName", condition = "#movieName != null")
    List<MovieNameAndUrlModel> findMovieUrl(String movieName) throws Exception;
}
/* * Copyright (C) 2018-2022 Thomas Akehurst * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.tomakehurst.wiremock.extension.requestfilter; import static com.github.tomakehurst.wiremock.common.Encoding.encodeBase64; import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.collect.FluentIterable.from; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Maps.newHashMap; import static org.apache.commons.lang3.StringUtils.countMatches; import static org.apache.commons.lang3.StringUtils.ordinalIndexOf; import com.github.tomakehurst.wiremock.http.*; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Predicate; import com.google.common.collect.ImmutableMap; import java.util.*; public class RequestWrapper implements Request { private final Request delegate; private final RequestMethod method; private final FieldTransformer<String> absoluteUrlTransformer; private final List<HttpHeader> addedHeaders; private final List<String> removedHeaders; private final Map<CaseInsensitiveKey, FieldTransformer<List<String>>> headerTransformers; private final Map<String, Cookie> additionalCookies; private final List<String> cookiesToRemove; private final Map<String, FieldTransformer<Cookie>> cookieTransformers; private final FieldTransformer<Body> bodyTransformer; private final FieldTransformer<Part> multipartTransformer; public 
RequestWrapper(Request delegate) { this( delegate, null, null, Collections.<HttpHeader>emptyList(), Collections.<String>emptyList(), Collections.<CaseInsensitiveKey, FieldTransformer<List<String>>>emptyMap(), Collections.<String, Cookie>emptyMap(), Collections.<String>emptyList(), Collections.<String, FieldTransformer<Cookie>>emptyMap(), null, null); } public RequestWrapper( Request delegate, RequestMethod method, FieldTransformer<String> absoluteUrlTransformer, List<HttpHeader> addedHeaders, List<String> removedHeaders, Map<CaseInsensitiveKey, FieldTransformer<List<String>>> headerTransformers, Map<String, Cookie> additionalCookies, List<String> cookiesToRemove, Map<String, FieldTransformer<Cookie>> cookieTransformers, FieldTransformer<Body> bodyTransformer, FieldTransformer<Part> multipartTransformer) { this.delegate = delegate; this.method = method; this.absoluteUrlTransformer = absoluteUrlTransformer; this.addedHeaders = addedHeaders; this.removedHeaders = removedHeaders; this.headerTransformers = headerTransformers; this.additionalCookies = additionalCookies; this.cookiesToRemove = cookiesToRemove; this.cookieTransformers = cookieTransformers; this.bodyTransformer = bodyTransformer; this.multipartTransformer = multipartTransformer; } public static Builder create() { return new Builder(); } @Override public String getUrl() { String absoluteUrl = getAbsoluteUrl(); int relativeStartIndex = countMatches(absoluteUrl, "/") >= 3 ? 
ordinalIndexOf(absoluteUrl, "/", 3) : absoluteUrl.length(); return absoluteUrl.substring(relativeStartIndex); } @Override public String getAbsoluteUrl() { if (absoluteUrlTransformer != null) { return absoluteUrlTransformer.transform(delegate.getAbsoluteUrl()); } return delegate.getAbsoluteUrl(); } @Override public RequestMethod getMethod() { return firstNonNull(method, delegate.getMethod()); } @Override public String getScheme() { return delegate.getScheme(); } @Override public String getHost() { return delegate.getHost(); } @Override public int getPort() { return delegate.getPort(); } @Override public String getClientIp() { return delegate.getClientIp(); } @Override public String getHeader(String key) { return getHeaders().getHeader(key).firstValue(); } @Override public HttpHeader header(String key) { return getHeaders().getHeader(key); } @Override public ContentTypeHeader contentTypeHeader() { return delegate.contentTypeHeader(); } @Override public HttpHeaders getHeaders() { Collection<HttpHeader> existingHeaders = delegate.getHeaders().all(); List<HttpHeader> combinedHeaders = from(existingHeaders) .append(addedHeaders) .filter( new Predicate<HttpHeader>() { @Override public boolean apply(HttpHeader httpHeader) { return !removedHeaders.contains(httpHeader.key()); } }) .transform( new Function<HttpHeader, HttpHeader>() { @Override public HttpHeader apply(HttpHeader httpHeader) { if (headerTransformers.containsKey(httpHeader.caseInsensitiveKey())) { FieldTransformer<List<String>> transformer = headerTransformers.get(httpHeader.caseInsensitiveKey()); List<String> newValues = transformer.transform(httpHeader.values()); return new HttpHeader(httpHeader.key(), newValues); } return httpHeader; } }) .toList(); return new HttpHeaders(combinedHeaders); } @Override public boolean containsHeader(String key) { return getHeaders().keys().contains(key); } @Override public Set<String> getAllHeaderKeys() { return getHeaders().keys(); } @Override public Map<String, Cookie> 
getCookies() { ImmutableMap.Builder<String, Cookie> builder = ImmutableMap.builder(); for (Map.Entry<String, Cookie> entry : delegate.getCookies().entrySet()) { Cookie newCookie = cookieTransformers.containsKey(entry.getKey()) ? cookieTransformers.get(entry.getKey()).transform(entry.getValue()) : entry.getValue(); if (!cookiesToRemove.contains(entry.getKey())) { builder.put(entry.getKey(), newCookie); } } builder.putAll(additionalCookies); return builder.build(); } @Override public QueryParameter queryParameter(String key) { return delegate.queryParameter(key); } @Override public byte[] getBody() { if (bodyTransformer != null) { return bodyTransformer.transform(new Body(delegate.getBody())).asBytes(); } return delegate.getBody(); } @Override public String getBodyAsString() { if (bodyTransformer != null) { return bodyTransformer.transform(new Body(delegate.getBodyAsString())).asString(); } return delegate.getBodyAsString(); } @Override public String getBodyAsBase64() { return encodeBase64(getBody()); } @Override public boolean isMultipart() { return delegate.isMultipart(); } @Override public Collection<Part> getParts() { if (delegate.getParts() == null || multipartTransformer == null) { return delegate.getParts(); } return from(delegate.getParts()) .transform( new Function<Part, Part>() { @Override public Part apply(Part part) { return multipartTransformer.transform(part); } }) .toList(); } @Override public Part getPart(String name) { if (multipartTransformer != null) { return multipartTransformer.transform(delegate.getPart(name)); } return delegate.getPart(name); } @Override public boolean isBrowserProxyRequest() { return delegate.isBrowserProxyRequest(); } @Override public Optional<Request> getOriginalRequest() { return delegate.getOriginalRequest(); } @Override public String getProtocol() { return delegate.getProtocol(); } public static class Builder { private RequestMethod requestMethod; private FieldTransformer<String> absoluteUrlTransformer; private final 
List<HttpHeader> additionalHeaders = newArrayList(); private final List<String> headersToRemove = newArrayList(); private final Map<CaseInsensitiveKey, FieldTransformer<List<String>>> headerTransformers = newHashMap(); private final Map<String, Cookie> additionalCookies = newHashMap(); private final List<String> cookiesToRemove = newArrayList(); private final Map<String, FieldTransformer<Cookie>> cookieTransformers = newHashMap(); private FieldTransformer<Body> bodyTransformer; private FieldTransformer<Part> mutlipartTransformer; public Builder addHeader(String key, String... values) { additionalHeaders.add(new HttpHeader(key, values)); return this; } public Builder removeHeader(String key) { headersToRemove.add(key); return this; } public Builder transformHeader(String key, FieldTransformer<List<String>> transformer) { headerTransformers.put(CaseInsensitiveKey.from(key), transformer); return this; } public Builder setMethod(RequestMethod method) { requestMethod = method; return this; } public Builder transformAbsoluteUrl(FieldTransformer<String> transformer) { absoluteUrlTransformer = transformer; return this; } public Request wrap(Request request) { return new RequestWrapper( request, requestMethod, absoluteUrlTransformer, additionalHeaders, headersToRemove, headerTransformers, additionalCookies, cookiesToRemove, cookieTransformers, bodyTransformer, mutlipartTransformer); } public Builder transformBody(FieldTransformer<Body> transformer) { bodyTransformer = transformer; return this; } public Builder transformCookie(String name, FieldTransformer<Cookie> transformer) { cookieTransformers.put(name, transformer); return this; } public Builder transformParts(FieldTransformer<Part> transformer) { mutlipartTransformer = transformer; return this; } public Builder addCookie(String name, Cookie value) { additionalCookies.put(name, value); return this; } public Builder removeCookie(String name) { cookiesToRemove.add(name); return this; } } }
package jeremy.meitu.classify;

import android.os.Bundle;
import android.support.design.widget.TabLayout;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.content.ContextCompat;
import android.support.v4.view.ViewPager;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;

import java.util.ArrayList;
import java.util.List;

import jeremy.meitu.R;
import jeremy.meitu.base.BaseFragment;

/**
 * Main classification fragment: shows a TabLayout + ViewPager with one
 * {@code BDSortFrag} page per category in {@link #tabs}.
 *
 * Created by JIANGJIAN650 on 2018/5/21.
 */
public class BDMainClassifyFram extends BaseFragment {
    private TabLayout tl;
    private ViewPager vp;
    // When the number of tabs is at most 4 the tab bar is fixed (non-scrollable).
    public static final int MOVABLE_COUNT = 4;
    // Category titles shown on the tabs (runtime strings; kept as-is).
    private String[] tabs = new String[]{"美女", "壁纸", "明星", "搞笑", "动漫", "宠物"};
    // Per-category sub-tag passed to each BDSortFrag ("全部" = "all").
    private String[] tags = new String[]{"全部", "全部", "全部", "全部", "全部", "全部"};
    private List<Fragment> fragments;

    @Override
    protected int setView() {
        return R.layout.frag_bd_main;
    }

    @Override
    protected void init(View view) {
        // Wire up views, then build pages before attaching adapter and tabs.
        tl = findViewById(view, R.id.tl);
        vp = findViewById(view, R.id.vp);
        initDatas();
        initViewPager();
        initTabLayout();
    }

    @Override
    protected void initData(Bundle savedInstanceState) {
        // No saved-state handling needed for this fragment.
    }

    @Override
    protected void onFragmentVisibleChange(boolean isVisible) {
        // No visibility-dependent behavior.
    }

    @Override
    protected void onFragmentFirstVisible() {
        // No lazy-load behavior; pages are built in init().
    }

    private void initTabLayout() {
        // MODE_FIXED: tab bar does not scroll and tabs share the width equally;
        // MODE_SCROLLABLE is used once there are more than MOVABLE_COUNT tabs.
        tl.setTabMode(tabs.length <= MOVABLE_COUNT ? TabLayout.MODE_FIXED : TabLayout.MODE_SCROLLABLE);
        // Selection indicator color.
        tl.setSelectedTabIndicatorColor(ContextCompat.getColor(getContext(), android.R.color.holo_blue_dark));
        tl.setSelectedTabIndicatorHeight((int) getResources().getDimension(R.dimen.indicatorHeight));
        // Link TabLayout and ViewPager so selection and scroll state stay in sync.
        tl.setupWithViewPager(vp);
        // Install a custom view (TextView from R.layout.tabview_main) on each tab.
        for (int i = 0; i < tabs.length; i++) {
            TabLayout.Tab tab = tl.getTabAt(i);
            TextView tv = (TextView) LayoutInflater.from(getContext()).inflate(R.layout.tabview_main, tl, false);
            tv.setText(tabs[i]);
            tab.setCustomView(tv);
        }
    }

    private void initViewPager() {
        vp.setAdapter(new MyPagerAdapter(getChildFragmentManager()));
    }

    private void initDatas() {
        // One BDSortFrag page per category, each seeded with its sub-tag.
        fragments = new ArrayList<>();
        for (int i = 0; i < tabs.length; i++) {
            fragments.add(BDSortFrag.newInstance(tabs[i], tags[i]));
        }
    }

    /** Adapter exposing the pre-built category fragments to the ViewPager. */
    private class MyPagerAdapter extends FragmentPagerAdapter {

        public MyPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int position) {
            return fragments.get(position);
        }

        @Override
        public int getCount() {
            return fragments.size();
        }

        /**
         * If NOT using custom tab views, this method must be overridden so
         * TabLayout can obtain the tab titles.
         */
//        @Nullable
//        @Override
//        public CharSequence getPageTitle(int position) {
//            return tabs.get(position);
//        }
    }
}
/* * Copyright 2016 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.common; import static java.util.Objects.requireNonNull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import javax.annotation.Nullable; import com.google.common.base.MoreObjects; /** * Default {@link RpcRequest} implementation. */ public class DefaultRpcRequest implements RpcRequest { private final Class<?> serviceType; private final String method; private final List<Object> params; /** * Creates a new instance with no parameter. */ public DefaultRpcRequest(Class<?> serviceType, String method) { this(serviceType, method, Collections.emptyList()); } /** * Creates a new instance with a single parameter. */ public DefaultRpcRequest(Class<?> serviceType, String method, @Nullable Object parameter) { this(serviceType, method, Collections.singletonList(parameter)); } /** * Creates a new instance with the specified parameters. */ public DefaultRpcRequest(Class<?> serviceType, String method, Iterable<?> params) { this(serviceType, method, copyParams(params)); } /** * Creates a new instance with the specified parameters. */ public DefaultRpcRequest(Class<?> serviceType, String method, Object... 
params) { this(serviceType, method, copyParams(params)); } private DefaultRpcRequest(Class<?> serviceType, String method, List<Object> params) { this.serviceType = requireNonNull(serviceType, "serviceType"); this.method = requireNonNull(method, "method"); this.params = params; } private static List<Object> copyParams(Iterable<?> params) { requireNonNull(params, "params"); // Note we do not use ImmutableList.copyOf() here, // because it does not allow a null element and we should allow a null argument. final List<Object> copy; if (params instanceof Collection) { copy = new ArrayList<>(((Collection<?>) params).size()); } else { copy = new ArrayList<>(8); } for (Object p : params) { copy.add(p); } return Collections.unmodifiableList(copy); } private static List<Object> copyParams(Object... params) { return Collections.unmodifiableList(Arrays.asList(requireNonNull(params, "params"))); } @Override public final Class<?> serviceType() { return serviceType; } @Override public final String method() { return method; } @Override public final List<Object> params() { return params; } @Override public int hashCode() { return method().hashCode() * 31 + params().hashCode(); } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof DefaultRpcRequest)) { return false; } final DefaultRpcRequest that = (DefaultRpcRequest) obj; return method().equals(that.method()) && params().equals(that.params()); } @Override public String toString() { return MoreObjects.toStringHelper(this) .add("serviceType", simpleServiceName()) .add("method", method()) .add("params", params()).toString(); } /** * Returns the simplified name of the {@link #serviceType()}. */ protected final String simpleServiceName() { final Class<?> serviceType = serviceType(); final String fqcn = serviceType.getName(); final int lastDot = fqcn.lastIndexOf('.'); return lastDot < 0 ? fqcn : fqcn.substring(lastDot + 1); } }
/* * Copyright (c) 2021 Huawei Device Co., Ltd. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ohos.samples.search.slice; import ohos.agp.components.Component; import ohos.agp.components.Text; import ohos.app.dispatcher.TaskDispatcher; import ohos.app.dispatcher.task.TaskPriority; import ohos.data.search.SearchAbility; import ohos.data.search.connect.ServiceConnectCallback; import ohos.samples.search.ResourceTable; import ohos.aafwk.ability.AbilitySlice; import ohos.aafwk.content.Intent; import ohos.samples.search.utils.LogUtils; import ohos.samples.search.utils.SearchUtils; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; /** * MainAbilitySlice * * @since 2021-07-23 */ public class MainAbilitySlice extends AbilitySlice { private static final String TAG = MainAbilitySlice.class.getSimpleName(); private SearchAbility searchAbility; private SearchUtils searUtils; private Text searchResult; @Override public void onStart(Intent intent) { super.onStart(intent); super.setUIContent(ResourceTable.Layout_ability_main); initComponents(); connectService(); } private void connectService() { LogUtils.info(TAG, "connect search service"); TaskDispatcher task = getGlobalTaskDispatcher(TaskPriority.DEFAULT); searchAbility = new SearchAbility(getContext()); searUtils = new SearchUtils(getContext(), searchAbility); task.asyncDispatch(() -> { CountDownLatch lock = new CountDownLatch(1); // connect to SearchService searchAbility.connect(new 
ServiceConnectCallback() { @Override public void onConnect() { lock.countDown(); } @Override public void onDisconnect() { } }); try { lock.await(3000, TimeUnit.MILLISECONDS); if (searchAbility.hasConnected()) { getUITaskDispatcher().asyncDispatch(() -> { searchResult.setText(ResourceTable.String_connect_service_succeed); }); } else { getUITaskDispatcher().asyncDispatch(() -> { searchResult.setText(ResourceTable.String_connect_service_failed); }); } } catch (InterruptedException e) { LogUtils.info(TAG, "connect search service failed"); } }); } private void initComponents() { Component btnBuildIndexForms = findComponentById(ResourceTable.Id_btnBuildIndexForms); btnBuildIndexForms.setClickedListener(this::buildIndexForms); Component btnReadIndexForms = findComponentById(ResourceTable.Id_btnReadIndexForms); btnReadIndexForms.setClickedListener(this::readIndexForms); Component btnInsertIndexData = findComponentById(ResourceTable.Id_btnInsertIndexData); btnInsertIndexData.setClickedListener(this::insertIndexData); Component btnUpdateIndexData = findComponentById(ResourceTable.Id_btnUpdateIndexData); btnUpdateIndexData.setClickedListener(this::updateIndexData); Component btnDeleteIndexData = findComponentById(ResourceTable.Id_btnDeleteIndexData); btnDeleteIndexData.setClickedListener(this::deleteIndexData); Component btnDeleteIndexDataByQuery = findComponentById(ResourceTable.Id_btnDeleteIndexDataByQuery); btnDeleteIndexDataByQuery.setClickedListener(this::deleteByQuery); Component btnGetSearchHitCount = findComponentById(ResourceTable.Id_btnGetHitCount); btnGetSearchHitCount.setClickedListener(this::getSearchHitCount); Component btnSearchByGroup = findComponentById(ResourceTable.Id_btnSearchByGroup); btnSearchByGroup.setClickedListener(this::searchByGroup); Component btnSearchByPage = findComponentById(ResourceTable.Id_btnSearchByPage); btnSearchByPage.setClickedListener(this::searchByPage); Component searchComponent = findComponentById(ResourceTable.Id_searchResult); if 
(searchComponent instanceof Text) { searchResult = (Text) searchComponent; } } private void searchByPage(Component component) { searchResult.setText(searUtils.searchByPage()); } private void searchByGroup(Component component) { searchResult.setText(searUtils.searchByGroup()); } private void getSearchHitCount(Component component) { searchResult.setText(searUtils.getSearchHitCount()); } private void deleteByQuery(Component component) { int result = searUtils.deleteIndexByQuery(); if (result == 1) { LogUtils.info(TAG, "updateIndexData succeed"); searchResult.setText(ResourceTable.String_succeed); } else { LogUtils.error(TAG, "updateIndexData failed"); searchResult.setText(ResourceTable.String_failed); } } private void deleteIndexData(Component component) { int result = searUtils.deleteIndexData(); if (result > 0) { LogUtils.error(TAG, "updateIndexData failed num=" + result); searchResult.setText(ResourceTable.String_failed); } else { LogUtils.info(TAG, "updateIndexData succeed"); searchResult.setText(ResourceTable.String_succeed); } } private void updateIndexData(Component component) { int result = searUtils.updateIndexData(); if (result > 0) { LogUtils.error(TAG, "updateIndexData failed num=" + result); searchResult.setText(ResourceTable.String_failed); } else { LogUtils.info(TAG, "updateIndexData succeed"); searchResult.setText(ResourceTable.String_succeed); } } private void insertIndexData(Component component) { int result = searUtils.insertIndexData(); if (result > 0) { LogUtils.error(TAG, "insertIndexData failed num=" + result); searchResult.setText(ResourceTable.String_failed); } else { LogUtils.info(TAG, "insertIndexData succeed"); searchResult.setText(ResourceTable.String_succeed); } } private void readIndexForms(Component component) { searchResult.setText(searUtils.readIndexForms()); } private void buildIndexForms(Component component) { int result = searUtils.buildIndexForms(); if (result == 1) { LogUtils.info(TAG, "buildIndexForms succeed"); 
searchResult.setText(ResourceTable.String_succeed); } else { LogUtils.error(TAG, "buildIndexForms failed"); searchResult.setText(ResourceTable.String_failed); } } @Override public void onActive() { super.onActive(); } @Override public void onForeground(Intent intent) { super.onForeground(intent); } }
package edu.harvard.iq.dataverse.api; import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.RoleAssignee; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; import edu.harvard.iq.dataverse.batch.jobs.importer.ImportMode; import edu.harvard.iq.dataverse.datacapturemodule.DataCaptureModuleUtil; import edu.harvard.iq.dataverse.datacapturemodule.ScriptRequestResponse; import edu.harvard.iq.dataverse.dataset.DatasetThumbnail; import edu.harvard.iq.dataverse.dataset.DatasetUtil; import edu.harvard.iq.dataverse.datasetutility.AddReplaceFileHelper; import edu.harvard.iq.dataverse.datasetutility.DataFileTagException; import edu.harvard.iq.dataverse.datasetutility.NoFilesException; import edu.harvard.iq.dataverse.datasetutility.OptionalFileParams; import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.impl.AbstractSubmitToArchiveCommand; import edu.harvard.iq.dataverse.engine.command.impl.AddLockCommand; import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand; import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand; import 
edu.harvard.iq.dataverse.engine.command.impl.DeletePrivateUrlCommand; import edu.harvard.iq.dataverse.engine.command.impl.DestroyDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.FinalizeDatasetPublicationCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetSpecificPublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetLatestPublishedDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.GetPrivateUrlCommand; import edu.harvard.iq.dataverse.engine.command.impl.ImportFromFileSystemCommand; import edu.harvard.iq.dataverse.engine.command.impl.LinkDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.ListRoleAssignments; import edu.harvard.iq.dataverse.engine.command.impl.ListVersionsCommand; import edu.harvard.iq.dataverse.engine.command.impl.MoveDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.PublishDatasetResult; import edu.harvard.iq.dataverse.engine.command.impl.RemoveLockCommand; import edu.harvard.iq.dataverse.engine.command.impl.RequestRsyncScriptCommand; import edu.harvard.iq.dataverse.engine.command.impl.ReturnDatasetToAuthorCommand; import edu.harvard.iq.dataverse.engine.command.impl.SetDatasetCitationDateCommand; import edu.harvard.iq.dataverse.engine.command.impl.SetCurationStatusCommand; import edu.harvard.iq.dataverse.engine.command.impl.SubmitDatasetForReviewCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetTargetURLCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetThumbnailCommand; import 
edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.export.ExportService; import edu.harvard.iq.dataverse.ingest.IngestServiceBean; import edu.harvard.iq.dataverse.privateurl.PrivateUrl; import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; import edu.harvard.iq.dataverse.dataaccess.DataAccess; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; import edu.harvard.iq.dataverse.dataaccess.S3AccessIO; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.UnforcedCommandException; import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetStorageSizeCommand; import edu.harvard.iq.dataverse.engine.command.impl.RevokeRoleCommand; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDvObjectPIDMetadataCommand; import edu.harvard.iq.dataverse.makedatacount.DatasetExternalCitations; import edu.harvard.iq.dataverse.makedatacount.DatasetExternalCitationsServiceBean; import edu.harvard.iq.dataverse.makedatacount.DatasetMetrics; import edu.harvard.iq.dataverse.makedatacount.DatasetMetricsServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; import edu.harvard.iq.dataverse.metrics.MetricsUtil; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.ArchiverUtil; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.EjbUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.SystemConfig; import edu.harvard.iq.dataverse.util.bagit.OREMap; import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; import 
edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.search.IndexServiceBean; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.workflow.Workflow; import edu.harvard.iq.dataverse.workflow.WorkflowContext; import edu.harvard.iq.dataverse.workflow.WorkflowServiceBean; import edu.harvard.iq.dataverse.workflow.WorkflowContext.TriggerType; import java.io.IOException; import java.io.InputStream; import java.io.StringReader; import java.sql.Timestamp; import java.text.MessageFormat; import java.text.SimpleDateFormat; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.*; import java.util.Map.Entry; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import javax.ejb.EJB; import javax.ejb.EJBException; import javax.inject.Inject; import javax.json.*; import javax.json.stream.JsonParsingException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.BadRequestException; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.NotAcceptableException; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import static javax.ws.rs.core.Response.Status.BAD_REQUEST; import javax.ws.rs.core.UriInfo; import org.apache.commons.lang3.StringUtils; import 
org.apache.solr.client.solrj.SolrServerException; import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataContentDisposition; import org.glassfish.jersey.media.multipart.FormDataParam; import com.amazonaws.services.s3.model.PartETag; import com.beust.jcommander.Strings; @Path("datasets") public class Datasets extends AbstractApiBean { private static final Logger logger = Logger.getLogger(Datasets.class.getCanonicalName()); @Inject DataverseSession session; @EJB DatasetServiceBean datasetService; @EJB DataverseServiceBean dataverseService; @EJB UserNotificationServiceBean userNotificationService; @EJB PermissionServiceBean permissionService; @EJB AuthenticationServiceBean authenticationServiceBean; @EJB DDIExportServiceBean ddiExportService; @EJB MetadataBlockServiceBean metadataBlockService; @EJB DataFileServiceBean fileService; @EJB IngestServiceBean ingestService; @EJB EjbDataverseEngine commandEngine; @EJB IndexServiceBean indexService; @EJB S3PackageImporter s3PackageImporter; @EJB SettingsServiceBean settingsService; // TODO: Move to AbstractApiBean @EJB DatasetMetricsServiceBean datasetMetricsSvc; @EJB DatasetExternalCitationsServiceBean datasetExternalCitationsService; @EJB EmbargoServiceBean embargoService; @Inject MakeDataCountLoggingServiceBean mdcLogService; @Inject DataverseRequestServiceBean dvRequestService; @Inject WorkflowServiceBean wfService; @Inject DataverseRoleServiceBean dataverseRoleService; /** * Used to consolidate the way we parse and handle dataset versions. 
* @param <T> the result type produced by each handler branch */
public interface DsVersionHandler<T> {
    // One branch per way a client may address a version: ":latest", ":draft",
    // an explicit "major.minor" pair, or ":latest-published".
    T handleLatest();
    T handleDraft();
    T handleSpecific( long major, long minor );
    T handleLatestPublished();
}

/**
 * GET /api/datasets/{id} — returns the dataset JSON plus, when accessible,
 * its latest version under the "latestVersion" key.
 * Logs a Make Data Count entry only when the latest accessible version is released.
 */
@GET
@Path("{id}")
public Response getDataset(@PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
    return response( req -> {
        // Both commands enforce authorization; findDatasetOrDie resolves either a
        // numeric id or a persistent identifier (behavior defined elsewhere in this class).
        final Dataset retrieved = execCommand(new GetDatasetCommand(req, findDatasetOrDie(id)));
        final DatasetVersion latest = execCommand(new GetLatestAccessibleDatasetVersionCommand(req, retrieved));
        final JsonObjectBuilder jsonbuilder = json(retrieved);
        //Report MDC if this is a released version (could be draft if user has access, or user may not have access at all and is not getting metadata beyond the minimum)
        if((latest != null) && latest.isReleased()) {
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved);
            mdcLogService.logEntry(entry);
        }
        // "latestVersion" is null when the caller cannot see any version.
        return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest) : null));
    });
}

// TODO:
// This API call should, ideally, call findUserOrDie() and the GetDatasetCommand
// to obtain the dataset that we are trying to export - which would handle
// Auth in the process... For now, Auth isn't necessary - since export ONLY
// WORKS on published datasets, which are open to the world. -- L.A.
// 4.5
/**
 * GET /api/datasets/export?persistentId=...&exporter=... — streams the cached
 * metadata export for a published dataset in the requested exporter's format.
 * Unauthenticated by design: export only works on published datasets (see TODO above).
 */
@GET
@Path("/export")
@Produces({"application/xml", "application/json", "application/html" })
public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {
    try {
        Dataset dataset = datasetService.findByGlobalId(persistentId);
        if (dataset == null) {
            return error(Response.Status.NOT_FOUND, "A dataset with the persistentId " + persistentId + " could not be found.");
        }

        ExportService instance = ExportService.getInstance();

        InputStream is = instance.getExport(dataset, exporter);

        String mediaType = instance.getMediaType(exporter);
        //Export is only possible for released (non-draft) dataset versions so we can log without checking to see if this is a request for a draft
        MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, dataset);
        mdcLogService.logEntry(entry);

        // The InputStream is handed to JAX-RS, which is responsible for closing it.
        return Response.ok()
                .entity(is)
                .type(mediaType).
                build();
    } catch (Exception wr) {
        // NOTE(review): this broad catch maps every failure (unknown exporter,
        // export error, I/O) to 403 "Export Failed"; only the message is logged,
        // not the stack trace. Consider narrowing and using logger.log(..., wr).
        logger.warning(wr.getMessage());
        return error(Response.Status.FORBIDDEN, "Export Failed");
    }
}

/**
 * DELETE /api/datasets/{id} — deletes the latest draft version, or destroys the
 * dataset entirely when it has a single version (superuser-only if released).
 */
@DELETE
@Path("{id}")
public Response deleteDataset( @PathParam("id") String id) {
    // Internally, "DeleteDatasetCommand" simply redirects to "DeleteDatasetVersionCommand"
    // (and there's a comment that says "TODO: remove this command")
    // do we need an exposed API call for it?
    // And DeleteDatasetVersionCommand further redirects to DestroyDatasetCommand,
    // if the dataset only has 1 version... In other words, the functionality
    // currently provided by this API is covered between the "deleteDraftVersion" and
    // "destroyDataset" API calls.
    // (The logic below follows the current implementation of the underlying
    // commands!)
    return response( req -> {
        Dataset doomed = findDatasetOrDie(id);
        DatasetVersion doomedVersion = doomed.getLatestVersion();
        User u = findUserOrDie();
        boolean destroy = false;

        if (doomed.getVersions().size() == 1) {
            // Single-version dataset: deletion destroys the whole dataset, so a
            // released one may only be removed by a superuser.
            if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Only superusers can delete published datasets"));
            }
            destroy = true;
        } else {
            // Multi-version dataset: only a trailing DRAFT version may be deleted here.
            if (!doomedVersion.isDraft()) {
                throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is a published dataset with multiple versions. This API can only delete the latest version if it is a DRAFT"));
            }
        }

        // Gather the locations of the physical files that will need to be
        // deleted once the destroy command execution has been finalized:
        Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomedVersion, destroy);

        execCommand( new DeleteDatasetCommand(req, findDatasetOrDie(id)));

        // If we have gotten this far, the destroy command has succeeded,
        // so we can finalize it by permanently deleting the physical files:
        // (DataFileService will double-check that the datafiles no
        // longer exist in the database, before attempting to delete
        // the physical files)
        if (!deleteStorageLocations.isEmpty()) {
            fileService.finalizeFileDeletes(deleteStorageLocations);
        }

        return ok("Dataset " + id + " deleted");
    });
}

/**
 * DELETE /api/datasets/{id}/destroy — irreversibly destroys a dataset and all of
 * its versions. Released datasets may only be destroyed by superusers.
 */
@DELETE
@Path("{id}/destroy")
public Response destroyDataset(@PathParam("id") String id) {
    return response(req -> {
        // first check if dataset is released, and if so, if user is a superuser
        Dataset doomed = findDatasetOrDie(id);
        User u = findUserOrDie();

        if (doomed.isReleased() && (!(u instanceof AuthenticatedUser) || !u.isSuperuser())) {
            throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "Destroy can only be called by superusers."));
        }

        // Gather the locations of the physical files that will need to be
        // deleted once the destroy command execution has been finalized:
        Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);

        execCommand(new DestroyDatasetCommand(doomed, req));

        // If we have gotten this far, the destroy command has succeeded,
        // so we can finalize permanently deleting the physical files:
        // (DataFileService will double-check that the datafiles no
        // longer exist in the database, before attempting to delete
        // the physical files)
        if (!deleteStorageLocations.isEmpty()) {
            fileService.finalizeFileDeletes(deleteStorageLocations);
        }

        return ok("Dataset " + id + " destroyed");
    });
}

/**
 * DELETE /api/datasets/{id}/versions/{versionId} — deletes the dataset's draft
 * version. Only the literal version id ":draft" is accepted.
 */
@DELETE
@Path("{id}/versions/{versionId}")
public Response deleteDraftVersion( @PathParam("id") String id,  @PathParam("versionId") String versionId ){
    if ( ! ":draft".equals(versionId) ) {
        return badRequest("Only the :draft version can be deleted");
    }

    return response( req -> {
        Dataset dataset = findDatasetOrDie(id);
        DatasetVersion doomed = dataset.getLatestVersion();

        if (!doomed.isDraft()) {
            throw new WrappedResponse(error(Response.Status.UNAUTHORIZED, "This is NOT a DRAFT version"));
        }

        // Gather the locations of the physical files that will need to be
        // deleted once the destroy command execution has been finalized:

        Map<Long, String> deleteStorageLocations = fileService.getPhysicalFilesToDelete(doomed);

        execCommand( new DeleteDatasetVersionCommand(req, dataset));

        // If we have gotten this far, the delete command has succeeded -
        // by either deleting the Draft version of a published dataset,
        // or destroying an unpublished one.
// This means we can finalize permanently deleting the physical files: // (DataFileService will double-check that the datafiles no // longer exist in the database, before attempting to delete // the physical files) if (!deleteStorageLocations.isEmpty()) { fileService.finalizeFileDeletes(deleteStorageLocations); } return ok("Draft version of dataset " + id + " deleted"); }); } @DELETE @Path("{datasetId}/deleteLink/{linkedDataverseId}") public Response deleteDatasetLinkingDataverse( @PathParam("datasetId") String datasetId, @PathParam("linkedDataverseId") String linkedDataverseId) { boolean index = true; return response(req -> { execCommand(new DeleteDatasetLinkingDataverseCommand(req, findDatasetOrDie(datasetId), findDatasetLinkingDataverseOrDie(datasetId, linkedDataverseId), index)); return ok("Link from Dataset " + datasetId + " to linked Dataverse " + linkedDataverseId + " deleted"); }); } @PUT @Path("{id}/citationdate") public Response setCitationDate( @PathParam("id") String id, String dsfTypeName) { return response( req -> { if ( dsfTypeName.trim().isEmpty() ){ return badRequest("Please provide a dataset field type in the requst body."); } DatasetFieldType dsfType = null; if (!":publicationDate".equals(dsfTypeName)) { dsfType = datasetFieldSvc.findByName(dsfTypeName); if (dsfType == null) { return badRequest("Dataset Field Type Name " + dsfTypeName + " not found."); } } execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), dsfType)); return ok("Citation Date for dataset " + id + " set to: " + (dsfType != null ? 
                dsfType.getDisplayName() : "default"));
    });
}

/**
 * DELETE /api/datasets/{id}/citationdate — resets the citation date to the default
 * (publication date) by clearing any custom citation-date field type.
 */
@DELETE
@Path("{id}/citationdate")
public Response useDefaultCitationDate( @PathParam("id") String id) {
    return response( req -> {
        execCommand(new SetDatasetCitationDateCommand(req, findDatasetOrDie(id), null));
        return ok("Citation Date for dataset " + id + " set to default");
    });
}

/**
 * GET /api/datasets/{id}/versions — lists all versions of the dataset visible
 * to the caller, as a JSON array.
 */
@GET
@Path("{id}/versions")
public Response listVersions( @PathParam("id") String id ) {
    return response( req ->
        ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id)) )
                .stream()
                .map( d -> json(d) )
                .collect(toJsonArray())));
}

/**
 * GET /api/datasets/{id}/versions/{versionId} — returns one version of the dataset.
 */
@GET
@Path("{id}/versions/{versionId}")
public Response getVersion( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
    return response( req -> {
        DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
        return (dsv == null || dsv.getId() == null) ? notFound("Dataset version not found")
                : ok(json(dsv));
    });
}

/**
 * GET /api/datasets/{id}/versions/{versionId}/files — lists the file metadata of
 * a dataset version.
 */
@GET
@Path("{id}/versions/{versionId}/files")
public Response getVersionFiles( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
    return response( req -> ok( jsonFileMetadatas(
            getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getFileMetadatas())));
}

/**
 * GET /api/datasets/{id}/dirindex — renders an HTML folder listing ("directory
 * index") for one folder of a dataset version. Defaults: root folder,
 * ":latest-published" version, access copies rather than originals.
 */
@GET
@Path("{id}/dirindex")
@Produces("text/html")
public Response getFileAccessFolderView(@PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) {

    folderName = folderName == null ? "" : folderName;
    versionId = versionId == null ?
            ":latest-published" : versionId;

    DatasetVersion version;
    try {
        DataverseRequest req = createDataverseRequest(findUserOrDie());
        version = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers);
    } catch (WrappedResponse wr) {
        return wr.getResponse();
    }

    String output = FileUtil.formatFolderListingHtml(folderName, version, "", originals != null && originals);

    // return "NOT FOUND" if there is no such folder in the dataset version:

    if ("".equals(output)) {
        return notFound("Folder " + folderName + " does not exist");
    }

    String indexFileName = folderName.equals("") ? ".index.html"
            : ".index-" + folderName.replace('/', '_') + ".html";
    response.setHeader("Content-disposition", "filename=\"" + indexFileName + "\"");

    return Response.ok()
            .entity(output)
            //.type("application/html").
            .build();
}

/**
 * GET /api/datasets/{id}/versions/{versionId}/metadata — returns the version's
 * dataset fields grouped by metadata block.
 */
@GET
@Path("{id}/versions/{versionId}/metadata")
public Response getVersionMetadata( @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
    return response( req -> ok(
            jsonByBlocks(
                getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers )
                        .getDatasetFields())));
}

/**
 * GET /api/datasets/{id}/versions/{versionNumber}/metadata/{block} — returns the
 * fields of one named metadata block, or 404 if the version has no such block.
 */
@GET
@Path("{id}/versions/{versionNumber}/metadata/{block}")
public Response getVersionMetadataBlock( @PathParam("id") String datasetId,
                                         @PathParam("versionNumber") String versionNumber,
                                         @PathParam("block") String blockName,
                                         @Context UriInfo uriInfo,
                                         @Context HttpHeaders headers ) {

    return response( req -> {
        DatasetVersion dsv = getDatasetVersionOrDie(req, versionNumber, findDatasetOrDie(datasetId), uriInfo, headers );

        Map<MetadataBlock, List<DatasetField>> fieldsByBlock = DatasetField.groupByBlock(dsv.getDatasetFields());
        for ( Map.Entry<MetadataBlock, List<DatasetField>> p : fieldsByBlock.entrySet() ) {
            if ( p.getKey().getName().equals(blockName) ) {
                return ok(json(p.getKey(), p.getValue()));
            }
        }
        return notFound("metadata block named " + blockName + " not found");
    });
}

@GET
// NOTE(review): this endpoint mutates state but is exposed as GET (the @GET
// annotation is on the preceding line); a POST would be more appropriate.
@Path("{id}/modifyRegistration")
public Response updateDatasetTargetURL(@PathParam("id") String id ) {
    // Re-registers the dataset's target URL with its PID provider.
    return response( req -> {
        execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(id), req));
        return ok("Dataset " + id + " target url updated");
    });
}

/**
 * POST /api/datasets/modifyRegistrationAll — re-registers the target URL for
 * every dataset. Per-dataset failures are logged and skipped, not propagated.
 */
@POST
@Path("/modifyRegistrationAll")
public Response updateDatasetTargetURLAll() {
    return response( req -> {
        datasetService.findAll().forEach( ds -> {
            try {
                execCommand(new UpdateDatasetTargetURLCommand(findDatasetOrDie(ds.getId().toString()), req));
            } catch (WrappedResponse ex) {
                Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
            }
        });
        return ok("Update All Dataset target url completed");
    });
}

/**
 * POST /api/datasets/{id}/modifyRegistrationMetadata — pushes current metadata
 * to the dataset's PID provider. The dataset must be released.
 */
@POST
@Path("{id}/modifyRegistrationMetadata")
public Response updateDatasetPIDMetadata(@PathParam("id") String id) {

    try {
        Dataset dataset = findDatasetOrDie(id);
        if (!dataset.isReleased()) {
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.failure.dataset.must.be.released"));
        }
    } catch (WrappedResponse ex) {
        // NOTE(review): the lookup failure is only logged here; execution falls
        // through and findDatasetOrDie is re-run inside response() below.
        Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
    }

    return response(req -> {
        execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(id), req));
        List<String> args = Arrays.asList(id);
        return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.single.dataset", args));
    });
}

// NOTE(review): mutating endpoint exposed as GET — same concern as
// modifyRegistration above.
@GET
@Path("/modifyRegistrationPIDMetadataAll")
public Response updateDatasetPIDMetadataAll() {
    return response( req -> {
        datasetService.findAll().forEach( ds -> {
            try {
                execCommand(new UpdateDvObjectPIDMetadataCommand(findDatasetOrDie(ds.getId().toString()), req));
            } catch (WrappedResponse ex) {
                Logger.getLogger(Datasets.class.getName()).log(Level.SEVERE, null, ex);
            }
        });
        return ok(BundleUtil.getStringFromBundle("datasets.api.updatePIDMetadata.success.for.update.all"));
    });
}

/**
 * PUT /api/datasets/{id}/versions/{versionId} — replaces the metadata of the
 * draft version from a JSON body. Only ":draft" is accepted as versionId; file
 * metadata may not be added through this endpoint.
 */
@PUT
@Path("{id}/versions/{versionId}")
@Consumes(MediaType.APPLICATION_JSON)
public Response updateDraftVersion( String jsonBody,
                                    @PathParam("id") String id, @PathParam("versionId") String versionId ){

    if ( ! ":draft".equals(versionId) ) {
        return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated");
    }

    try ( StringReader rdr = new StringReader(jsonBody) ) {
        DataverseRequest req = createDataverseRequest(findUserOrDie());
        Dataset ds = findDatasetOrDie(id);
        JsonObject json = Json.createReader(rdr).readObject();
        DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json);

        // clear possibly stale fields from the incoming dataset version.
        // creation and modification dates are updated by the commands.
        incomingVersion.setId(null);
        incomingVersion.setVersionNumber(null);
        incomingVersion.setMinorVersionNumber(null);
        incomingVersion.setVersionState(DatasetVersion.VersionState.DRAFT);
        incomingVersion.setDataset(ds);
        incomingVersion.setCreateTime(null);
        incomingVersion.setLastUpdateTime(null);

        if (!incomingVersion.getFileMetadatas().isEmpty()){
            return error( Response.Status.BAD_REQUEST, "You may not add files via this api.");
        }

        boolean updateDraft = ds.getLatestVersion().isDraft();

        DatasetVersion managedVersion;
        if ( updateDraft ) {
            // Update an existing draft: copy fields/terms onto the edit version,
            // then run the update command and re-read the managed draft.
            final DatasetVersion editVersion = ds.getEditVersion();
            editVersion.setDatasetFields(incomingVersion.getDatasetFields());
            editVersion.setTermsOfUseAndAccess( incomingVersion.getTermsOfUseAndAccess() );
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
            managedVersion = managedDataset.getEditVersion();
        } else {
            // No draft exists yet: create one from the incoming version.
            managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion));
        }
        // DatasetVersion managedVersion = execCommand( updateDraft
        //     ? new UpdateDatasetVersionCommand(req, incomingVersion)
        //     : new CreateDatasetVersionCommand(req, ds, incomingVersion));
        return ok( json(managedVersion) );

    } catch (JsonParseException ex) {
        logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex);
        return error( Response.Status.BAD_REQUEST, "Error parsing dataset version: " + ex.getMessage() );

    } catch (WrappedResponse ex) {
        return ex.getResponse();

    }
}

/**
 * GET /api/datasets/{id}/versions/{versionId}/metadata (JSON-LD variant) —
 * returns the version's metadata as an OAI-ORE map in JSON-LD.
 */
@GET
@Path("{id}/versions/{versionId}/metadata")
@Produces("application/ld+json, application/json-ld")
public Response getVersionJsonLDMetadata(@PathParam("id") String id, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
    try {
        DataverseRequest req = createDataverseRequest(findUserOrDie());
        DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(id), uriInfo, headers);
        // Email addresses are excluded from the map when the corresponding setting is on.
        OREMap ore = new OREMap(dsv,
                settingsService.isTrueForKey(SettingsServiceBean.Key.ExcludeEmailFromExport, false));
        return ok(ore.getOREMapBuilder(true));

    } catch (WrappedResponse ex) {
        ex.printStackTrace();
        return ex.getResponse();

    } catch (Exception jpe) {
        // NOTE(review): the message has no {0} placeholder, so the
        // getLocalizedMessage() parameter is never interpolated into the log;
        // passing the exception itself would also preserve the stack trace.
        logger.log(Level.SEVERE, "Error getting jsonld metadata for dsv: ", jpe.getLocalizedMessage());
        jpe.printStackTrace();
        return error(Response.Status.INTERNAL_SERVER_ERROR, jpe.getLocalizedMessage());
    }
}

/**
 * GET /api/datasets/{id}/metadata (JSON-LD) — convenience overload delegating to
 * the versioned endpoint with ":draft".
 */
@GET
@Path("{id}/metadata")
@Produces("application/ld+json, application/json-ld")
public Response getVersionJsonLDMetadata(@PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
    return getVersionJsonLDMetadata(id, ":draft", uriInfo, headers);
}

/**
 * PUT /api/datasets/{id}/metadata — updates the draft version from a JSON-LD
 * body; with replace=true, matching terms are overwritten instead of appended.
 */
@PUT
@Path("{id}/metadata")
@Consumes("application/ld+json, application/json-ld")
public Response updateVersionMetadata(String jsonLDBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam("replace") boolean replaceTerms) {

    try {
        Dataset ds = findDatasetOrDie(id);
        DataverseRequest req = createDataverseRequest(findUserOrDie());
        DatasetVersion dsv =
                ds.getEditVersion();
        boolean updateDraft = ds.getLatestVersion().isDraft();
        // Merge (or replace, when replaceTerms is set) the JSON-LD body into the edit version.
        dsv = JSONLDUtil.updateDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc, !replaceTerms, false);
        DatasetVersion managedVersion;
        if (updateDraft) {
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
            managedVersion = managedDataset.getEditVersion();
        } else {
            managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
        }
        String info = updateDraft ? "Version Updated" : "Version Created";
        return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));

    } catch (WrappedResponse ex) {
        return ex.getResponse();
    } catch (JsonParsingException jpe) {
        logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
        return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
    }
}

/**
 * PUT /api/datasets/{id}/metadata/delete — deletes the metadata described by the
 * JSON-LD body from the draft version.
 */
@PUT
@Path("{id}/metadata/delete")
@Consumes("application/ld+json, application/json-ld")
public Response deleteMetadata(String jsonLDBody, @PathParam("id") String id) {
    try {
        Dataset ds = findDatasetOrDie(id);
        DataverseRequest req = createDataverseRequest(findUserOrDie());
        DatasetVersion dsv = ds.getEditVersion();
        boolean updateDraft = ds.getLatestVersion().isDraft();
        dsv = JSONLDUtil.deleteDatasetVersionMDFromJsonLD(dsv, jsonLDBody, metadataBlockService, datasetFieldSvc);
        DatasetVersion managedVersion;
        if (updateDraft) {
            Dataset managedDataset = execCommand(new UpdateDatasetVersionCommand(ds, req));
            managedVersion = managedDataset.getEditVersion();
        } else {
            managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
        }
        String info = updateDraft ? "Version Updated" : "Version Created";
        return ok(Json.createObjectBuilder().add(info, managedVersion.getVersionDate()));

    } catch (WrappedResponse ex) {
        ex.printStackTrace();
        return ex.getResponse();
    } catch (JsonParsingException jpe) {
        logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonLDBody);
        jpe.printStackTrace();
        return error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
    }
}

/**
 * PUT /api/datasets/{id}/deleteMetadata — deletes individual field values
 * described by a JSON body from the draft version.
 */
@PUT
@Path("{id}/deleteMetadata")
public Response deleteVersionMetadata(String jsonBody, @PathParam("id") String id) throws WrappedResponse {
    DataverseRequest req = createDataverseRequest(findUserOrDie());
    return processDatasetFieldDataDelete(jsonBody, id, req);
}

/**
 * Removes the field values named in jsonBody from the dataset's edit version.
 * Accepts either a single field object or a {"fields":[...]} array; every value
 * to delete must exist, otherwise the whole request fails with 400.
 */
private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) {
    try (StringReader rdr = new StringReader(jsonBody)) {

        Dataset ds = findDatasetOrDie(id);
        JsonObject json = Json.createReader(rdr).readObject();
        DatasetVersion dsv = ds.getEditVersion();

        List<DatasetField> fields = new LinkedList<>();
        DatasetField singleField = null;

        JsonArray fieldsJson = json.getJsonArray("fields");
        if (fieldsJson == null) {
            singleField = jsonParser().parseField(json, Boolean.FALSE);
            fields.add(singleField);
        } else {
            fields = jsonParser().parseMultipleFields(json);
        }

        dsv.setVersionState(DatasetVersion.VersionState.DRAFT);

        // Deletions are collected per category and applied after matching, to
        // avoid mutating the collections while iterating them.
        List<ControlledVocabularyValue> controlledVocabularyItemsToRemove = new ArrayList<ControlledVocabularyValue>();
        List<DatasetFieldValue> datasetFieldValueItemsToRemove = new ArrayList<DatasetFieldValue>();
        List<DatasetFieldCompoundValue> datasetFieldCompoundValueItemsToRemove = new ArrayList<DatasetFieldCompoundValue>();

        for (DatasetField updateField : fields) {
            boolean found = false;
            for (DatasetField dsf : dsv.getDatasetFields()) {
                if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
                    if (dsf.getDatasetFieldType().isAllowMultiples()) {
                        if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                            if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
                                    for (ControlledVocabularyValue existing : dsf.getControlledVocabularyValues()) {
                                        if (existing.getStrValue().equals(cvv.getStrValue())) {
                                            found = true;
                                            controlledVocabularyItemsToRemove.add(existing);
                                        }
                                    }
                                    // Every requested value must match something, else fail fast.
                                    if (!found) {
                                        logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                        return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + cvv.getStrValue() + " not found.");
                                    }
                                }
                                for (ControlledVocabularyValue remove : controlledVocabularyItemsToRemove) {
                                    dsf.getControlledVocabularyValues().remove(remove);
                                }
                            } else {
                                // Single controlled-vocabulary value: clear it when it matches.
                                if (dsf.getSingleControlledVocabularyValue().getStrValue().equals(updateField.getSingleControlledVocabularyValue().getStrValue())) {
                                    found = true;
                                    dsf.setSingleControlledVocabularyValue(null);
                                }
                            }
                        } else {
                            if (!updateField.getDatasetFieldType().isCompound()) {
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                    // Plain multi-valued field: match by display value.
                                    for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
                                        for (DatasetFieldValue edsfv : dsf.getDatasetFieldValues()) {
                                            if (edsfv.getDisplayValue().equals(dfv.getDisplayValue())) {
                                                found = true;
                                                // NOTE(review): the incoming value (dfv) is queued
                                                // here rather than the existing one (edsfv), unlike
                                                // the CVV branch above — verify the remove() below
                                                // actually matches list elements.
                                                datasetFieldValueItemsToRemove.add(dfv);
                                            }
                                        }
                                        if (!found) {
                                            logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                            return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + dfv.getDisplayValue() + " not found.");
                                        }
                                    }
                                    datasetFieldValueItemsToRemove.forEach((remove) -> {
                                        dsf.getDatasetFieldValues().remove(remove);
                                    });
                                } else {
                                    // Single plain value: clear it when it matches.
                                    if (dsf.getSingleValue().getDisplayValue().equals(updateField.getSingleValue().getDisplayValue())) {
                                        found = true;
                                        dsf.setSingleValue(null);
                                    }
                                }
                            } else {
                                // Compound field: match whole compound values by their joined display string.
                                for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
                                    String deleteVal = getCompoundDisplayValue(dfcv);
                                    for (DatasetFieldCompoundValue existing : dsf.getDatasetFieldCompoundValues()) {
                                        String existingString = getCompoundDisplayValue(existing);
                                        if (existingString.equals(deleteVal)) {
                                            found = true;
                                            datasetFieldCompoundValueItemsToRemove.add(existing);
                                        }
                                    }
                                    datasetFieldCompoundValueItemsToRemove.forEach((remove) -> {
                                        dsf.getDatasetFieldCompoundValues().remove(remove);
                                    });
                                    if (!found) {
                                        logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                        return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + deleteVal + " not found.");
                                    }
                                }
                            }
                        }
                    } else {
                        // Field type does not allow multiples: deleting it clears both
                        // the single value and any single controlled-vocabulary value.
                        found = true;
                        dsf.setSingleValue(null);
                        dsf.setSingleControlledVocabularyValue(null);
                    }
                    break;
                }
            }
            if (!found){
                String displayValue = !updateField.getDisplayValue().isEmpty() ? updateField.getDisplayValue() : updateField.getCompoundDisplayValue();
                logger.log(Level.SEVERE, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
                return error(Response.Status.BAD_REQUEST, "Delete metadata failed: " + updateField.getDatasetFieldType().getDisplayName() + ": " + displayValue + " not found." );
            }
        }

        boolean updateDraft = ds.getLatestVersion().isDraft();
        DatasetVersion managedVersion = updateDraft ?
                execCommand(new UpdateDatasetVersionCommand(ds, req)).getEditVersion()
                : execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
        return ok(json(managedVersion));

    } catch (JsonParseException ex) {
        logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
        return error(Response.Status.BAD_REQUEST, "Error processing metadata delete: " + ex.getMessage());

    } catch (WrappedResponse ex) {
        logger.log(Level.SEVERE, "Delete metadata error: " + ex.getMessage(), ex);
        return ex.getResponse();

    }
}

/**
 * Joins the non-null child field values of a compound value into a single
 * "; "-separated display string; used for matching compound values on delete.
 */
private String getCompoundDisplayValue (DatasetFieldCompoundValue dscv){
    String returnString = "";
    for (DatasetField dsf : dscv.getChildDatasetFields()) {
        for (String value : dsf.getValues()) {
            if (!(value == null)) {
                returnString += (returnString.isEmpty() ? "" : "; ") + value.trim();
            }
        }
    }
    return returnString;
}

/**
 * PUT /api/datasets/{id}/editMetadata — adds field values to the draft version;
 * with ?replace present (any value), existing values are replaced instead.
 */
@PUT
@Path("{id}/editMetadata")
public Response editVersionMetadata(String jsonBody, @PathParam("id") String id, @QueryParam("replace") Boolean replace) {
    // Note: replace is true whenever the query parameter is present at all,
    // regardless of its value.
    Boolean replaceData = replace != null;
    DataverseRequest req = null;
    try {
        req = createDataverseRequest(findUserOrDie());
    } catch (WrappedResponse ex) {
        logger.log(Level.SEVERE, "Edit metdata error: " + ex.getMessage(), ex);
        return ex.getResponse();
    }

    return processDatasetUpdate(jsonBody, id, req, replaceData);
}

/**
 * Applies an add-or-replace metadata update from jsonBody to the dataset's edit
 * version. Accepts a single field object or a {"fields":[...]} array.
 */
private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){
    try (StringReader rdr = new StringReader(jsonBody)) {

        Dataset ds = findDatasetOrDie(id);
        JsonObject json = Json.createReader(rdr).readObject();
        DatasetVersion dsv = ds.getEditVersion();

        List<DatasetField> fields = new LinkedList<>();
        DatasetField singleField = null;

        JsonArray fieldsJson = json.getJsonArray("fields");
        if( fieldsJson == null ){
            singleField = jsonParser().parseField(json, Boolean.FALSE);
            fields.add(singleField);
        } else{
            fields = jsonParser().parseMultipleFields(json);
        }

        // NOTE(review): "valdationErrors" is a misspelled local identifier;
        // left unchanged in this documentation pass.
        String valdationErrors = validateDatasetFieldValues(fields);
        if
                (!valdationErrors.isEmpty()) {
            logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + valdationErrors, valdationErrors);
            return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + valdationErrors);
        }

        dsv.setVersionState(DatasetVersion.VersionState.DRAFT);

        //loop through the update fields
        // and compare to the version fields
        //if exist add/replace values
        //if not add entire dsf
        for (DatasetField updateField : fields) {
            boolean found = false;
            for (DatasetField dsf : dsv.getDatasetFields()) {
                if (dsf.getDatasetFieldType().equals(updateField.getDatasetFieldType())) {
                    found = true;
                    if (dsf.isEmpty() || dsf.getDatasetFieldType().isAllowMultiples() || replaceData) {
                        // priorCVV collects the existing controlled-vocabulary values so new
                        // ones can be appended without duplicating what is already displayed.
                        List priorCVV = new ArrayList<>();
                        String cvvDisplay = "";

                        if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                            cvvDisplay = dsf.getDisplayValue();
                            for (ControlledVocabularyValue cvvOld : dsf.getControlledVocabularyValues()) {
                                priorCVV.add(cvvOld);
                            }
                        }

                        if (replaceData) {
                            // Replace mode: wipe all existing values of this field first.
                            if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                dsf.setDatasetFieldCompoundValues(new ArrayList<>());
                                dsf.setDatasetFieldValues(new ArrayList<>());
                                dsf.setControlledVocabularyValues(new ArrayList<>());
                                priorCVV.clear();
                                dsf.getControlledVocabularyValues().clear();
                            } else {
                                dsf.setSingleValue("");
                                dsf.setSingleControlledVocabularyValue(null);
                            }
                        }
                        if (updateField.getDatasetFieldType().isControlledVocabulary()) {
                            if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                for (ControlledVocabularyValue cvv : updateField.getControlledVocabularyValues()) {
                                    // Skip values already present (matched against the display string).
                                    if (!cvvDisplay.contains(cvv.getStrValue())) {
                                        priorCVV.add(cvv);
                                    }
                                }
                                dsf.setControlledVocabularyValues(priorCVV);
                            } else {
                                dsf.setSingleControlledVocabularyValue(updateField.getSingleControlledVocabularyValue());
                            }
                        } else {
                            if (!updateField.getDatasetFieldType().isCompound()) {
                                if (dsf.getDatasetFieldType().isAllowMultiples()) {
                                    for (DatasetFieldValue dfv : updateField.getDatasetFieldValues()) {
                                        if (!dsf.getDisplayValue().contains(dfv.getDisplayValue())) {
                                            dfv.setDatasetField(dsf);
                                            dsf.getDatasetFieldValues().add(dfv);
                                        }
                                    }
                                } else {
                                    dsf.setSingleValue(updateField.getValue());
                                }
                            } else {
                                for (DatasetFieldCompoundValue dfcv : updateField.getDatasetFieldCompoundValues()) {
                                    if (!dsf.getCompoundDisplayValue().contains(updateField.getCompoundDisplayValue())) {
                                        dfcv.setParentDatasetField(dsf);
                                        dsf.setDatasetVersion(dsv);
                                        dsf.getDatasetFieldCompoundValues().add(dfcv);
                                    }
                                }
                            }
                        }
                    } else {
                        // Field already has data and multiples are not allowed: only legal
                        // when the caller asked to replace.
                        if (!dsf.isEmpty() && !dsf.getDatasetFieldType().isAllowMultiples() || !replaceData) {
                            return error(Response.Status.BAD_REQUEST, "You may not add data to a field that already has data and does not allow multiples. Use replace=true to replace existing data (" + dsf.getDatasetFieldType().getDisplayName() + ")");
                        }
                    }
                    break;
                }
            }
            if (!found) {
                // Field type not present on the version yet: attach the whole incoming field.
                updateField.setDatasetVersion(dsv);
                dsv.getDatasetFields().add(updateField);
            }
        }
        boolean updateDraft = ds.getLatestVersion().isDraft();
        DatasetVersion managedVersion;

        if (updateDraft) {
            managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getEditVersion();
        } else {
            managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, dsv));
        }

        return ok(json(managedVersion));

    } catch (JsonParseException ex) {
        logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex);
        return error(Response.Status.BAD_REQUEST, "Error parsing dataset update: " + ex.getMessage());

    } catch (WrappedResponse ex) {
        logger.log(Level.SEVERE, "Update metdata error: " + ex.getMessage(), ex);
        return ex.getResponse();

    }
}

/**
 * Returns a space-separated list of validation problems (empty string when the
 * supplied fields are all non-empty for their cardinality).
 */
private String validateDatasetFieldValues(List<DatasetField> fields) {
    StringBuilder error = new StringBuilder();

    for (DatasetField dsf : fields) {
        if (dsf.getDatasetFieldType().isAllowMultiples() && dsf.getControlledVocabularyValues().isEmpty()
                && dsf.getDatasetFieldCompoundValues().isEmpty() && dsf.getDatasetFieldValues().isEmpty()) {
            error.append("Empty multiple value for field: 
").append(dsf.getDatasetFieldType().getDisplayName()).append(" "); } else if (!dsf.getDatasetFieldType().isAllowMultiples() && dsf.getSingleValue().getValue().isEmpty()) { error.append("Empty value for field: ").append(dsf.getDatasetFieldType().getDisplayName()).append(" "); } } if (!error.toString().isEmpty()) { return (error.toString()); } return ""; } /** * @deprecated This was shipped as a GET but should have been a POST, see https://github.com/IQSS/dataverse/issues/2431 */ @GET @Path("{id}/actions/:publish") @Deprecated public Response publishDataseUsingGetDeprecated( @PathParam("id") String id, @QueryParam("type") String type ) { logger.info("publishDataseUsingGetDeprecated called on id " + id + ". Encourage use of POST rather than GET, which is deprecated."); return publishDataset(id, type, false); } @POST @Path("{id}/actions/:publish") public Response publishDataset(@PathParam("id") String id, @QueryParam("type") String type, @QueryParam("assureIsIndexed") boolean mustBeIndexed) { try { if (type == null) { return error(Response.Status.BAD_REQUEST, "Missing 'type' parameter (either 'major','minor', or 'updatecurrent')."); } boolean updateCurrent=false; AuthenticatedUser user = findAuthenticatedUserOrDie(); type = type.toLowerCase(); boolean isMinor=false; switch (type) { case "minor": isMinor = true; break; case "major": isMinor = false; break; case "updatecurrent": if(user.isSuperuser()) { updateCurrent=true; } else { return error(Response.Status.FORBIDDEN, "Only superusers can update the current version"); } break; default: return error(Response.Status.BAD_REQUEST, "Illegal 'type' parameter value '" + type + "'. 
It needs to be either 'major', 'minor', or 'updatecurrent'."); } Dataset ds = findDatasetOrDie(id); if (mustBeIndexed) { logger.fine("IT: " + ds.getIndexTime()); logger.fine("MT: " + ds.getModificationTime()); logger.fine("PIT: " + ds.getPermissionIndexTime()); logger.fine("PMT: " + ds.getPermissionModificationTime()); if (ds.getIndexTime() != null && ds.getModificationTime() != null) { logger.fine("ITMT: " + (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0)); } /* * Some calls, such as the /datasets/actions/:import* commands do not set the * modification or permission modification times. The checks here are trying to * see if indexing or permissionindexing could be pending, so they check to see * if the relevant modification time is set and if so, whether the index is also * set and if so, if it after the modification time. If the modification time is * set and the index time is null or is before the mod time, the 409/conflict * error is returned. * */ if ((ds.getModificationTime()!=null && (ds.getIndexTime() == null || (ds.getIndexTime().compareTo(ds.getModificationTime()) <= 0))) || (ds.getPermissionModificationTime()!=null && (ds.getPermissionIndexTime() == null || (ds.getPermissionIndexTime().compareTo(ds.getPermissionModificationTime()) <= 0)))) { return error(Response.Status.CONFLICT, "Dataset is awaiting indexing"); } } if (updateCurrent) { /* * Note: The code here mirrors that in the * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes * to the core logic (i.e. beyond updating the messaging about results) should * be applied to the code there as well. 
*/ String errorMsg = null; String successMsg = null; try { CuratePublishedDatasetVersionCommand cmd = new CuratePublishedDatasetVersionCommand(ds, createDataverseRequest(user)); ds = commandEngine.submit(cmd); successMsg = BundleUtil.getStringFromBundle("datasetversion.update.success"); // If configured, update archive copy as well String className = settingsService.get(SettingsServiceBean.Key.ArchiverClassName.toString()); DatasetVersion updateVersion = ds.getLatestVersion(); AbstractSubmitToArchiveCommand archiveCommand = ArchiverUtil.createSubmitToArchiveCommand(className, createDataverseRequest(user), updateVersion); if (archiveCommand != null) { // Delete the record of any existing copy since it is now out of date/incorrect updateVersion.setArchivalCopyLocation(null); /* * Then try to generate and submit an archival copy. Note that running this * command within the CuratePublishedDatasetVersionCommand was causing an error: * "The attribute [id] of class * [edu.harvard.iq.dataverse.DatasetFieldCompoundValue] is mapped to a primary * key column in the database. Updates are not allowed." To avoid that, and to * simplify reporting back to the GUI whether this optional step succeeded, I've * pulled this out as a separate submit(). 
*/ try { updateVersion = commandEngine.submit(archiveCommand); if (updateVersion.getArchivalCopyLocation() != null) { successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.success"); } else { successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure"); } } catch (CommandException ex) { successMsg = BundleUtil.getStringFromBundle("datasetversion.update.archive.failure") + " - " + ex.toString(); logger.severe(ex.getMessage()); } } } catch (CommandException ex) { errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString(); logger.severe(ex.getMessage()); } if (errorMsg != null) { return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg); } else { return Response.ok(Json.createObjectBuilder() .add("status", STATUS_OK) .add("status_details", successMsg) .add("data", json(ds)).build()) .type(MediaType.APPLICATION_JSON) .build(); } } else { PublishDatasetResult res = execCommand(new PublishDatasetCommand(ds, createDataverseRequest(user), isMinor)); return res.isWorkflow() ? 
accepted(json(res.getDataset())) : ok(json(res.getDataset())); } } catch (WrappedResponse ex) { return ex.getResponse(); } } @POST @Path("{id}/actions/:releasemigrated") @Consumes("application/ld+json, application/json-ld") public Response publishMigratedDataset(String jsonldBody, @PathParam("id") String id, @DefaultValue("false") @QueryParam ("updatepidatprovider") boolean contactPIDProvider) { try { AuthenticatedUser user = findAuthenticatedUserOrDie(); if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Only superusers can release migrated datasets"); } Dataset ds = findDatasetOrDie(id); try { JsonObject metadata = JSONLDUtil.decontextualizeJsonLD(jsonldBody); String pubDate = metadata.getString(JsonLDTerm.schemaOrg("datePublished").getUrl()); logger.fine("Submitted date: " + pubDate); LocalDateTime dateTime = null; if(!StringUtils.isEmpty(pubDate)) { dateTime = JSONLDUtil.getDateTimeFrom(pubDate); final Timestamp time = Timestamp.valueOf(dateTime); //Set version release date ds.getLatestVersion().setReleaseTime(new Date(time.getTime())); } // dataset.getPublicationDateFormattedYYYYMMDD()) // Assign a version number if not set if (ds.getLatestVersion().getVersionNumber() == null) { if (ds.getVersions().size() == 1) { // First Release ds.getLatestVersion().setVersionNumber(Long.valueOf(1)); ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0)); } else if (ds.getLatestVersion().isMinorUpdate()) { ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber())); ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(ds.getMinorVersionNumber() + 1)); } else { // major, non-first release ds.getLatestVersion().setVersionNumber(Long.valueOf(ds.getVersionNumber() + 1)); ds.getLatestVersion().setMinorVersionNumber(Long.valueOf(0)); } } if(ds.getLatestVersion().getVersionNumber()==1 && ds.getLatestVersion().getMinorVersionNumber()==0) { //Also set publication date if this is the first if(dateTime != null) { 
ds.setPublicationDate(Timestamp.valueOf(dateTime));
                }
                // Release User is only set in FinalizeDatasetPublicationCommand if the pub date
                // is null, so set it here.
                ds.setReleaseUser((AuthenticatedUser) user);
            }
        } catch (Exception e) {
            // Any failure decoding/applying the JSON-LD publication date maps to a 400.
            logger.fine(e.getMessage());
            throw new BadRequestException("Unable to set publication date (" + JsonLDTerm.schemaOrg("datePublished").getUrl() + "): " + e.getMessage());
        }
        /*
         * Note: The code here mirrors that in the
         * edu.harvard.iq.dataverse.DatasetPage:updateCurrentVersion method. Any changes
         * to the core logic (i.e. beyond updating the messaging about results) should
         * be applied to the code there as well.
         */
        String errorMsg = null;
        Optional<Workflow> prePubWf = wfService.getDefaultWorkflow(TriggerType.PrePublishDataset);
        try {
            // ToDo - should this be in onSuccess()? May relate to todo above
            if (prePubWf.isPresent()) {
                // Start the workflow, the workflow will call FinalizeDatasetPublication later
                wfService.start(prePubWf.get(), new WorkflowContext(createDataverseRequest(user), ds, TriggerType.PrePublishDataset, !contactPIDProvider), false);
            } else {
                FinalizeDatasetPublicationCommand cmd = new FinalizeDatasetPublicationCommand(ds, createDataverseRequest(user), !contactPIDProvider);
                ds = commandEngine.submit(cmd);
            }
        } catch (CommandException ex) {
            errorMsg = BundleUtil.getStringFromBundle("datasetversion.update.failure") + " - " + ex.toString();
            logger.severe(ex.getMessage());
        }
        if (errorMsg != null) {
            return error(Response.Status.INTERNAL_SERVER_ERROR, errorMsg);
        } else {
            // 202 when a pre-publication workflow will finish the release asynchronously.
            return prePubWf.isPresent() ? accepted(json(ds)) : ok(json(ds));
        }
    } catch (WrappedResponse ex) {
        return ex.getResponse();
    }
}

/**
 * Moves a dataset into another dataverse, identified by alias.
 * Superuser requirement is enforced by MoveDatasetCommand itself.
 *
 * @param force when true, bypasses move guards (see UnforcedCommandException handling below)
 */
@POST
@Path("{id}/move/{targetDataverseAlias}")
public Response moveDataset(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
    try {
        User u = findUserOrDie();
        Dataset ds = findDatasetOrDie(id);
        Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
        if (target == null) {
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.targetDataverseNotFound"));
        }
        // Command requires Super user - it will be tested by the command
        execCommand(new MoveDatasetCommand(
                createDataverseRequest(u), ds, target, force
        ));
        return ok(BundleUtil.getStringFromBundle("datasets.api.moveDataset.success"));
    } catch (WrappedResponse ex) {
        // A guarded (unforced) failure gets a hint to retry with forceMove=true.
        if (ex.getCause() instanceof UnforcedCommandException) {
            return ex.refineResponse(BundleUtil.getStringFromBundle("datasets.api.moveDataset.error.suggestForce"));
        } else {
            return ex.getResponse();
        }
    }
}

/**
 * Sets an embargo on a list of files in a dataset.
 * Body: {"dateAvailable": ISO date, "reason": text, "fileIds": [...]}.
 */
@POST
@Path("{id}/files/actions/:set-embargo")
public Response createFileEmbargo(@PathParam("id") String id, String jsonBody){
    // user is authenticated
    AuthenticatedUser authenticatedUser = null;
    try {
        authenticatedUser = findAuthenticatedUserOrDie();
    } catch (WrappedResponse ex) {
        return error(Status.UNAUTHORIZED, "Authentication is required.");
    }
    Dataset dataset;
    try {
        dataset = findDatasetOrDie(id);
    } catch (WrappedResponse ex) {
        return ex.getResponse();
    }
    // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
    /*
     * This is only a pre-test - if there's no draft version, there are clearly no
     * files that a normal user can change. The converse is not true. A draft
     * version could contain only files that have already been released. Further, we
     * haven't checked the file list yet so the user could still be trying to change
     * released files even if there are some unreleased/draft-only files. Doing this
     * check here does avoid having to do further parsing for some error cases. It
     * also checks the user can edit this dataset, so we don't have to make that
     * check later.
     */
    if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
        return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
    }
    // check if embargoes are allowed(:MaxEmbargoDurationInMonths), gets the :MaxEmbargoDurationInMonths setting variable, if 0 or not set(null) return 400
    long maxEmbargoDurationInMonths = 0;
    try {
        maxEmbargoDurationInMonths = Long.parseLong(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
    } catch (NumberFormatException nfe){
        // NOTE(review): an unset setting is detected by sniffing the NPE-ish message for "null";
        // a malformed (non-null) setting value falls through with the 0 default — confirm intended.
        if (nfe.getMessage().contains("null")) {
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
        }
    }
    if (maxEmbargoDurationInMonths == 0){
        return error(Status.BAD_REQUEST, "No Embargoes allowed");
    }
    StringReader rdr = new StringReader(jsonBody);
    JsonObject json = Json.createReader(rdr).readObject();
    Embargo embargo = new Embargo();
    LocalDate currentDateTime = LocalDate.now();
    LocalDate dateAvailable = LocalDate.parse(json.getString("dateAvailable"));
    // check :MaxEmbargoDurationInMonths if -1 (i.e. -1 means "no limit")
    LocalDate maxEmbargoDateTime = maxEmbargoDurationInMonths != -1 ? LocalDate.now().plusMonths(maxEmbargoDurationInMonths) : null;
    // dateAvailable is not in the past
    if (dateAvailable.isAfter(currentDateTime)){
        embargo.setDateAvailable(dateAvailable);
    } else {
        return error(Status.BAD_REQUEST, "Date available can not be in the past");
    }
    // dateAvailable is within limits
    if (maxEmbargoDateTime != null){
        if (dateAvailable.isAfter(maxEmbargoDateTime)){
            return error(Status.BAD_REQUEST, "Date available can not exceed MaxEmbargoDurationInMonths: "+maxEmbargoDurationInMonths);
        }
    }
    embargo.setReason(json.getString("reason"));
    List<DataFile> datasetFiles = dataset.getFiles();
    List<DataFile> filesToEmbargo = new LinkedList<>();
    // extract fileIds from json, find datafiles and add to list
    if (json.containsKey("fileIds")){
        JsonArray fileIds = json.getJsonArray("fileIds");
        for (JsonValue jsv : fileIds) {
            try {
                DataFile dataFile = findDataFileOrDie(jsv.toString());
                filesToEmbargo.add(dataFile);
            } catch (WrappedResponse ex) {
                return ex.getResponse();
            }
        }
    }
    // Embargoes left with no referencing files after this operation get deleted below.
    List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
    // check if files belong to dataset
    if (datasetFiles.containsAll(filesToEmbargo)) {
        JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
        boolean badFiles = false;
        for (DataFile datafile : filesToEmbargo) {
            // superuser can overrule an existing embargo, even on released files
            if (datafile.isReleased() && !authenticatedUser.isSuperuser()) {
                restrictedFiles.add(datafile.getId());
                badFiles = true;
            }
        }
        if (badFiles) {
            return Response.status(Status.FORBIDDEN)
                    .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", STATUS_ERROR)
                            .add("message", "You do not have permission to embargo the following files")
                            .add("files", restrictedFiles).build())
                    .type(MediaType.APPLICATION_JSON_TYPE).build();
        }
        embargo=embargoService.merge(embargo);
        // Good request, so add the embargo. Track any existing embargoes so we can
        // delete them if there are no files left that reference them.
for (DataFile datafile : filesToEmbargo) {
            // Detach any embargo the file already carried; collect it if it becomes unreferenced.
            Embargo emb = datafile.getEmbargo();
            if (emb != null) {
                emb.getDataFiles().remove(datafile);
                if (emb.getDataFiles().isEmpty()) {
                    orphanedEmbargoes.add(emb);
                }
            }
            // Save merges the datafile with an embargo into the context
            datafile.setEmbargo(embargo);
            fileService.save(datafile);
        }
        //Call service to get action logged
        long embargoId = embargoService.save(embargo, authenticatedUser.getIdentifier());
        if (orphanedEmbargoes.size() > 0) {
            for (Embargo emb : orphanedEmbargoes) {
                embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
            }
        }
        //If superuser, report changes to any released files
        if (authenticatedUser.isSuperuser()) {
            String releasedFiles = filesToEmbargo.stream().filter(d -> d.isReleased())
                    .map(d -> d.getId().toString()).collect(Collectors.joining(","));
            if (!releasedFiles.isBlank()) {
                actionLogSvc
                        .log(new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoAddedTo")
                                .setInfo("Embargo id: " + embargo.getId() + " added for released file(s), id(s) " + releasedFiles + ".")
                                .setUserIdentifier(authenticatedUser.getIdentifier()));
            }
        }
        return ok(Json.createObjectBuilder().add("message", "Files were embargoed"));
    } else {
        return error(BAD_REQUEST, "Not all files belong to dataset");
    }
}

/**
 * Removes embargoes from a list of files in a dataset.
 * Body: {"fileIds": [...]}.
 */
@POST
@Path("{id}/files/actions/:unset-embargo")
public Response removeFileEmbargo(@PathParam("id") String id, String jsonBody){
    // user is authenticated
    AuthenticatedUser authenticatedUser = null;
    try {
        authenticatedUser = findAuthenticatedUserOrDie();
    } catch (WrappedResponse ex) {
        return error(Status.UNAUTHORIZED, "Authentication is required.");
    }
    Dataset dataset;
    try {
        dataset = findDatasetOrDie(id);
    } catch (WrappedResponse ex) {
        return ex.getResponse();
    }
    // client is superadmin or (client has EditDataset permission on these files and files are unreleased)
    // check if files are unreleased(DRAFT?)
    //ToDo - here and below - check the release status of files and not the dataset state (draft dataset version still can have released files)
    if ((!authenticatedUser.isSuperuser() && (dataset.getLatestVersion().getVersionState() != DatasetVersion.VersionState.DRAFT) ) || !permissionService.userOn(authenticatedUser, dataset).has(Permission.EditDataset)) {
        return error(Status.FORBIDDEN, "Either the files are released and user is not a superuser or user does not have EditDataset permissions");
    }
    // check if embargoes are allowed(:MaxEmbargoDurationInMonths), gets the :MaxEmbargoDurationInMonths setting variable, if 0 or not set(null) return 400
    //Todo - is 400 right for embargoes not enabled
    //Todo - handle getting Long for duration in one place (settings getLong method? or is that only in wrapper (view scoped)?
    // NOTE(review): this endpoint parses the duration as int while :set-embargo uses Long.parseLong —
    // inconsistent; see the ToDo above about centralizing this.
    int maxEmbargoDurationInMonths = 0;
    try {
        maxEmbargoDurationInMonths = Integer.parseInt(settingsService.get(SettingsServiceBean.Key.MaxEmbargoDurationInMonths.toString()));
    } catch (NumberFormatException nfe){
        if (nfe.getMessage().contains("null")) {
            return error(Status.BAD_REQUEST, "No Embargoes allowed");
        }
    }
    if (maxEmbargoDurationInMonths == 0){
        return error(Status.BAD_REQUEST, "No Embargoes allowed");
    }
    StringReader rdr = new StringReader(jsonBody);
    JsonObject json = Json.createReader(rdr).readObject();
    List<DataFile> datasetFiles = dataset.getFiles();
    List<DataFile> embargoFilesToUnset = new LinkedList<>();
    // extract fileIds from json, find datafiles and add to list
    if (json.containsKey("fileIds")){
        JsonArray fileIds = json.getJsonArray("fileIds");
        for (JsonValue jsv : fileIds) {
            try {
                DataFile dataFile = findDataFileOrDie(jsv.toString());
                embargoFilesToUnset.add(dataFile);
            } catch (WrappedResponse ex) {
                return ex.getResponse();
            }
        }
    }
    // Embargoes left with no referencing files after this operation get deleted below.
    List<Embargo> orphanedEmbargoes = new ArrayList<Embargo>();
    // check if files belong to dataset
    if (datasetFiles.containsAll(embargoFilesToUnset)) {
        JsonArrayBuilder restrictedFiles = Json.createArrayBuilder();
        boolean badFiles = false;
        for (DataFile datafile : embargoFilesToUnset) {
            // superuser can overrule an existing embargo, even on released files
            // NOTE(review): the second getEmbargo() != null is redundant after the first
            // disjunct ruled out null — harmless, but could be simplified.
            if (datafile.getEmbargo()==null || ((datafile.isReleased() && datafile.getEmbargo() != null) && !authenticatedUser.isSuperuser())) {
                restrictedFiles.add(datafile.getId());
                badFiles = true;
            }
        }
        if (badFiles) {
            return Response.status(Status.FORBIDDEN)
                    .entity(NullSafeJsonBuilder.jsonObjectBuilder().add("status", STATUS_ERROR)
                            .add("message", "The following files do not have embargoes or you do not have permission to remove their embargoes")
                            .add("files", restrictedFiles).build())
                    .type(MediaType.APPLICATION_JSON_TYPE).build();
        }
        // Good request, so remove the embargo from the files. Track any existing embargoes so we can
        // delete them if there are no files left that reference them.
        for (DataFile datafile : embargoFilesToUnset) {
            Embargo emb = datafile.getEmbargo();
            if (emb != null) {
                emb.getDataFiles().remove(datafile);
                if (emb.getDataFiles().isEmpty()) {
                    orphanedEmbargoes.add(emb);
                }
            }
            // Save merges the datafile with an embargo into the context
            datafile.setEmbargo(null);
            fileService.save(datafile);
        }
        if (orphanedEmbargoes.size() > 0) {
            for (Embargo emb : orphanedEmbargoes) {
                embargoService.deleteById(emb.getId(), authenticatedUser.getIdentifier());
            }
        }
        // Report embargo removal from any released files to the action log.
        String releasedFiles = embargoFilesToUnset.stream().filter(d -> d.isReleased()).map(d->d.getId().toString()).collect(Collectors.joining(","));
        if(!releasedFiles.isBlank()) {
            ActionLogRecord removeRecord = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "embargoRemovedFrom").setInfo("Embargo removed from released file(s), id(s) " + releasedFiles + ".");
            removeRecord.setUserIdentifier(authenticatedUser.getIdentifier());
            actionLogSvc.log(removeRecord);
        }
        return ok(Json.createObjectBuilder().add("message", "Embargo(es) were removed from files"));
    } else {
        return error(BAD_REQUEST, "Not all files belong to dataset");
    }
}

@PUT
@Path("{linkedDatasetId}/link/{linkingDataverseAlias}")
public Response linkDataset(@PathParam("linkedDatasetId") String linkedDatasetId, @PathParam("linkingDataverseAlias") String linkingDataverseAlias) {
    // Links an existing dataset into another dataverse (authorization enforced by LinkDatasetCommand).
    try{
        User u = findUserOrDie();
        Dataset linked = findDatasetOrDie(linkedDatasetId);
        Dataverse linking = findDataverseOrDie(linkingDataverseAlias);
        // NOTE(review): the null checks below look unreachable if the OrDie helpers throw on
        // not-found — confirm; kept as defensive code.
        if (linked == null){
            return error(Response.Status.BAD_REQUEST, "Linked Dataset not found.");
        }
        if (linking == null){
            return error(Response.Status.BAD_REQUEST, "Linking Dataverse not found.");
        }
        execCommand(new LinkDatasetCommand(
                createDataverseRequest(u), linking, linked
        ));
        return ok("Dataset " + linked.getId() + " linked successfully to " + linking.getAlias());
    } catch (WrappedResponse ex) {
        return ex.getResponse();
    }
}

/**
 * Lists the dataverses that link to this dataset (superuser only).
 */
@GET
@Path("{id}/links")
public Response getLinks(@PathParam("id") String idSupplied ) {
    try {
        User u = findUserOrDie();
        if (!u.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Not a superuser");
        }
        Dataset dataset = findDatasetOrDie(idSupplied);
        long datasetId = dataset.getId();
        List<Dataverse> dvsThatLinkToThisDatasetId = dataverseSvc.findDataversesThatLinkToThisDatasetId(datasetId);
        JsonArrayBuilder dataversesThatLinkToThisDatasetIdBuilder = Json.createArrayBuilder();
        for (Dataverse dataverse : dvsThatLinkToThisDatasetId) {
            dataversesThatLinkToThisDatasetIdBuilder.add(dataverse.getAlias() + " (id " + dataverse.getId() + ")");
        }
        JsonObjectBuilder response = Json.createObjectBuilder();
        response.add("dataverses that link to dataset id " + datasetId, dataversesThatLinkToThisDatasetIdBuilder);
        return ok(response);
    } catch (WrappedResponse wr) {
        return wr.getResponse();
    }
}

/**
 * Add a given role assignment to a given user or group on this dataset.
 *
 * @param ra     role assignment DTO (assignee identifier + role alias)
 * @param id     dataset id
 * @param apiKey api key
 */
@POST
@Path("{identifier}/assignments")
public Response createAssignment(RoleAssignmentDTO ra, @PathParam("identifier") String id, @QueryParam("key") String apiKey) {
    try {
        Dataset dataset = findDatasetOrDie(id);
        RoleAssignee assignee = findAssignee(ra.getAssignee());
        if (assignee == null) {
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.not.found.error"));
        }
        // Resolve the role alias by walking up the owner-dataverse chain until a match is found.
        DataverseRole theRole;
        Dataverse dv = dataset.getOwner();
        theRole = null;
        while ((theRole == null) && (dv != null)) {
            for (DataverseRole aRole : rolesSvc.availableRoles(dv.getId())) {
                if (aRole.getAlias().equals(ra.getRole())) {
                    theRole = aRole;
                    break;
                }
            }
            dv = dv.getOwner();
        }
        if (theRole == null) {
            List<String> args = Arrays.asList(ra.getRole(), dataset.getOwner().getDisplayName());
            return error(Status.BAD_REQUEST, BundleUtil.getStringFromBundle("datasets.api.grant.role.not.found.error", args));
        }
        String privateUrlToken = null;
        return ok(
                json(execCommand(new AssignRoleCommand(assignee, theRole, dataset, createDataverseRequest(findUserOrDie()), privateUrlToken))));
    } catch (WrappedResponse ex) {
        List<String> args = Arrays.asList(ex.getMessage());
        logger.log(Level.WARNING, BundleUtil.getStringFromBundle("datasets.api.grant.role.cant.create.assignment.error", args));
        return ex.getResponse();
    }
}

/**
 * Revokes a role assignment by its id, after confirming the dataset exists.
 */
@DELETE
@Path("{identifier}/assignments/{id}")
public Response deleteAssignment(@PathParam("id") long assignmentId, @PathParam("identifier") String dsId) {
    RoleAssignment ra = em.find(RoleAssignment.class, assignmentId);
    if (ra != null) {
        try {
            findDatasetOrDie(dsId);
            execCommand(new RevokeRoleCommand(ra, createDataverseRequest(findUserOrDie())));
            List<String> args = Arrays.asList(ra.getRole().getName(), ra.getAssigneeIdentifier(), ra.getDefinitionPoint().accept(DvObject.NamePrinter));
            return ok(BundleUtil.getStringFromBundle("datasets.api.revoke.role.success", args));
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    } else {
        List<String> args = Arrays.asList(Long.toString(assignmentId));
        return error(Status.NOT_FOUND, BundleUtil.getStringFromBundle("datasets.api.revoke.role.not.found.error", args));
    }
}

/**
 * Lists all role assignments on this dataset.
 */
@GET
@Path("{identifier}/assignments")
public Response getAssignments(@PathParam("identifier") String id) {
    return response( req ->
            ok( execCommand(
                    new ListRoleAssignments(req, findDatasetOrDie(id)))
                    .stream().map(ra->json(ra)).collect(toJsonArray())) );
}

/**
 * Returns the dataset's private URL data, or 404 if none exists.
 */
@GET
@Path("{id}/privateUrl")
public Response getPrivateUrlData(@PathParam("id") String idSupplied) {
    return response( req -> {
        PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, findDatasetOrDie(idSupplied)));
        return (privateUrl != null) ? ok(json(privateUrl))
                : error(Response.Status.NOT_FOUND, "Private URL not found.");
    });
}

/**
 * Creates a private URL for the dataset; optionally with anonymized access
 * (only when the AnonymizedFieldTypeNames setting is configured).
 */
@POST
@Path("{id}/privateUrl")
public Response createPrivateUrl(@PathParam("id") String idSupplied,@DefaultValue("false") @QueryParam ("anonymizedAccess") boolean anonymizedAccess) {
    if(anonymizedAccess && settingsSvc.getValueForKey(SettingsServiceBean.Key.AnonymizedFieldTypeNames)==null) {
        throw new NotAcceptableException("Anonymized Access not enabled");
    }
    return response( req ->
            ok(json(execCommand(
                    new CreatePrivateUrlCommand(req, findDatasetOrDie(idSupplied), anonymizedAccess)))));
}

/**
 * Deletes the dataset's private URL, or 404 if none exists.
 */
@DELETE
@Path("{id}/privateUrl")
public Response deletePrivateUrl(@PathParam("id") String idSupplied) {
    return response( req -> {
        Dataset dataset = findDatasetOrDie(idSupplied);
        PrivateUrl privateUrl = execCommand(new GetPrivateUrlCommand(req, dataset));
        if (privateUrl != null) {
            execCommand(new DeletePrivateUrlCommand(req, dataset));
            return ok("Private URL deleted.");
        } else {
            return notFound("No Private URL to delete.");
        }
    });
}

/**
 * Lists thumbnail candidates (dataset logo and/or image files) for the dataset.
 * Requires permission to issue UpdateDatasetThumbnailCommand.
 */
@GET
@Path("{id}/thumbnail/candidates")
public Response getDatasetThumbnailCandidates(@PathParam("id") String idSupplied) {
    try {
        Dataset dataset = findDatasetOrDie(idSupplied);
        boolean canUpdateThumbnail = false;
        try {
            canUpdateThumbnail = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset).canIssue(UpdateDatasetThumbnailCommand.class);
        } catch (WrappedResponse ex) {
            // Permission lookup failure is treated as "not permitted" (handled just below).
            logger.info("Exception thrown while trying to figure out permissions while getting thumbnail for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage());
        }
        if (!canUpdateThumbnail) {
            return error(Response.Status.FORBIDDEN, "You are not permitted to list dataset thumbnail candidates.");
        }
        JsonArrayBuilder data = Json.createArrayBuilder();
        boolean considerDatasetLogoAsCandidate = true;
        for (DatasetThumbnail datasetThumbnail : DatasetUtil.getThumbnailCandidates(dataset, considerDatasetLogoAsCandidate, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE)) {
            JsonObjectBuilder candidate = Json.createObjectBuilder();
            String base64image = datasetThumbnail.getBase64image();
            if (base64image != null) {
                logger.fine("found a candidate!");
                candidate.add("base64image", base64image);
            }
            DataFile dataFile = datasetThumbnail.getDataFile();
            if (dataFile != null) {
                candidate.add("dataFileId", dataFile.getId());
            }
            data.add(candidate);
        }
        return ok(data);
    } catch (WrappedResponse ex) {
        return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
    }
}

/**
 * Streams the dataset's thumbnail as a PNG, or 404 when none is available.
 */
@GET
@Produces({"image/png"})
@Path("{id}/thumbnail")
public Response getDatasetThumbnail(@PathParam("id") String idSupplied) {
    try {
        Dataset dataset = findDatasetOrDie(idSupplied);
        InputStream is = DatasetUtil.getThumbnailAsInputStream(dataset, ImageThumbConverter.DEFAULT_CARDIMAGE_SIZE);
        if(is == null) {
            return notFound("Thumbnail not available");
        }
        return Response.ok(is).build();
    } catch (WrappedResponse wr) {
        return notFound("Thumbnail not available");
    }
}

// TODO: Rather than only supporting looking up files by their database IDs (dataFileIdSupplied), consider supporting persistent identifiers.
@POST @Path("{id}/thumbnail/{dataFileId}") public Response setDataFileAsThumbnail(@PathParam("id") String idSupplied, @PathParam("dataFileId") long dataFileIdSupplied) { try { DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setDatasetFileAsThumbnail, dataFileIdSupplied, null)); return ok("Thumbnail set to " + datasetThumbnail.getBase64image()); } catch (WrappedResponse wr) { return wr.getResponse(); } } @POST @Path("{id}/thumbnail") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response uploadDatasetLogo(@PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream ) { try { DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream)); return ok("Thumbnail is now " + datasetThumbnail.getBase64image()); } catch (WrappedResponse wr) { return wr.getResponse(); } } @DELETE @Path("{id}/thumbnail") public Response removeDatasetLogo(@PathParam("id") String idSupplied) { try { DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.removeThumbnail, null, null)); return ok("Dataset thumbnail removed."); } catch (WrappedResponse wr) { return wr.getResponse(); } } @GET @Path("{identifier}/dataCaptureModule/rsync") public Response getRsync(@PathParam("identifier") String id) { //TODO - does it make sense to switch this to dataset identifier for consistency with the rest of the DCM APIs? 
        // Rsync must be enabled via the :UploadMethods setting, otherwise this endpoint is off.
        if (!DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
            return error(Response.Status.METHOD_NOT_ALLOWED, SettingsServiceBean.Key.UploadMethods + " does not contain " + SystemConfig.FileUploadMethods.RSYNC + ".");
        }
        Dataset dataset = null;
        try {
            dataset = findDatasetOrDie(id);
            AuthenticatedUser user = findAuthenticatedUserOrDie();
            ScriptRequestResponse scriptRequestResponse = execCommand(new RequestRsyncScriptCommand(createDataverseRequest(user), dataset));
            // Lock the dataset so nothing else modifies it while the DCM upload is in flight.
            DatasetLock lock = datasetService.addDatasetLock(dataset.getId(), DatasetLock.Reason.DcmUpload, user.getId(), "script downloaded");
            if (lock == null) {
                logger.log(Level.WARNING, "Failed to lock the dataset (dataset id={0})", dataset.getId());
                return error(Response.Status.FORBIDDEN, "Failed to lock the dataset (dataset id="+dataset.getId()+")");
            }
            return ok(scriptRequestResponse.getScript(), MediaType.valueOf(MediaType.TEXT_PLAIN), null);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        } catch (EJBException ex) {
            return error(Response.Status.INTERNAL_SERVER_ERROR, "Something went wrong attempting to download rsync script: " + EjbUtil.ejbExceptionToString(ex));
        }
    }

    /**
     * This api endpoint triggers the creation of a "package" file in a dataset
     * after that package has been moved onto the same filesystem via the Data Capture Module.
     * The package is really just a way that Dataverse interprets a folder created by DCM, seeing it as just one file.
     * The "package" can be downloaded over RSAL.
     *
     * This endpoint currently supports both posix file storage and AWS s3 storage in Dataverse, and depending on which one is active acts accordingly.
     *
     * The initial design of the DCM/Dataverse interaction was not to use packages, but to allow import of all individual files natively into Dataverse.
     * But due to the possibly immense number of files (millions) the package approach was taken.
     * This is relevant because the posix ("file") code contains many remnants of that development work.
     * The s3 code was written later and is set to only support import as packages. It takes a lot from FileRecordWriter.
     * -MAD 4.9.1
     */
    @POST
    @Path("{identifier}/dataCaptureModule/checksumValidation")
    public Response receiveChecksumValidationResults(@PathParam("identifier") String id, JsonObject jsonFromDcm) {
        logger.log(Level.FINE, "jsonFromDcm: {0}", jsonFromDcm);
        // Only superusers may report DCM checksum validation results.
        AuthenticatedUser authenticatedUser = null;
        try {
            authenticatedUser = findAuthenticatedUserOrDie();
        } catch (WrappedResponse ex) {
            return error(Response.Status.BAD_REQUEST, "Authentication is required.");
        }
        if (!authenticatedUser.isSuperuser()) {
            return error(Response.Status.FORBIDDEN, "Superusers only.");
        }
        String statusMessageFromDcm = jsonFromDcm.getString("status");
        try {
            Dataset dataset = findDatasetOrDie(id);
            if ("validation passed".equals(statusMessageFromDcm)) {
                logger.log(Level.INFO, "Checksum Validation passed for DCM.");
                // The active storage driver type decides the import path below ("file" vs "s3").
                String storageDriver = dataset.getDataverseContext().getEffectiveStorageDriverId();
                String uploadFolder = jsonFromDcm.getString("uploadFolder");
                int totalSize = jsonFromDcm.getInt("totalSize");
                String storageDriverType = System.getProperty("dataverse.file."
+ storageDriver + ".type"); if (storageDriverType.equals("file")) { logger.log(Level.INFO, "File storage driver used for (dataset id={0})", dataset.getId()); ImportMode importMode = ImportMode.MERGE; try { JsonObject jsonFromImportJobKickoff = execCommand(new ImportFromFileSystemCommand(createDataverseRequest(findUserOrDie()), dataset, uploadFolder, new Long(totalSize), importMode)); long jobId = jsonFromImportJobKickoff.getInt("executionId"); String message = jsonFromImportJobKickoff.getString("message"); JsonObjectBuilder job = Json.createObjectBuilder(); job.add("jobId", jobId); job.add("message", message); return ok(job); } catch (WrappedResponse wr) { String message = wr.getMessage(); return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to put the files into Dataverse. Message was '" + message + "'."); } } else if(storageDriverType.equals("s3")) { logger.log(Level.INFO, "S3 storage driver used for DCM (dataset id={0})", dataset.getId()); try { //Where the lifting is actually done, moving the s3 files over and having dataverse know of the existance of the package s3PackageImporter.copyFromS3(dataset, uploadFolder); DataFile packageFile = s3PackageImporter.createPackageDataFile(dataset, uploadFolder, new Long(totalSize)); if (packageFile == null) { logger.log(Level.SEVERE, "S3 File package import failed."); return error(Response.Status.INTERNAL_SERVER_ERROR, "S3 File package import failed."); } DatasetLock dcmLock = dataset.getLockFor(DatasetLock.Reason.DcmUpload); if (dcmLock == null) { logger.log(Level.WARNING, "Dataset not locked for DCM upload"); } else { datasetService.removeDatasetLocks(dataset, DatasetLock.Reason.DcmUpload); dataset.removeLock(dcmLock); } // update version using the command engine to enforce user permissions and constraints if (dataset.getVersions().size() == 1 && dataset.getLatestVersion().getVersionState() == DatasetVersion.VersionState.DRAFT) { try 
                            {
                                Command<Dataset> cmd;
                                cmd = new UpdateDatasetVersionCommand(dataset, new DataverseRequest(authenticatedUser, (HttpServletRequest) null));
                                commandEngine.submit(cmd);
                            } catch (CommandException ex) {
                                return error(Response.Status.INTERNAL_SERVER_ERROR, "CommandException updating DatasetVersion from batch job: " + ex.getMessage());
                            }
                        } else {
                            // NOTE(review): "form batch job" looks like a typo for "from batch job" in this log text.
                            String constraintError = "ConstraintException updating DatasetVersion form batch job: dataset must be a " +
                                    "single version in draft mode.";
                            logger.log(Level.SEVERE, constraintError);
                        }
                        // The success payload for the s3 path is intentionally an empty JSON object.
                        JsonObjectBuilder job = Json.createObjectBuilder();
                        return ok(job);
                    } catch (IOException e) {
                        String message = e.getMessage();
                        return error(Response.Status.INTERNAL_SERVER_ERROR, "Uploaded files have passed checksum validation but something went wrong while attempting to move the files into Dataverse. Message was '" + message + "'.");
                    }
                } else {
                    return error(Response.Status.INTERNAL_SERVER_ERROR, "Invalid storage driver in Dataverse, not compatible with dcm");
                }
            } else if ("validation failed".equals(statusMessageFromDcm)) {
                // Notify everyone who can edit the dataset, plus all superusers, about the checksum failure.
                Map<String, AuthenticatedUser> distinctAuthors = permissionService.getDistinctUsersWithPermissionOn(Permission.EditDataset, dataset);
                distinctAuthors.values().forEach((value) -> {
                    userNotificationService.sendNotification((AuthenticatedUser) value, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
                });
                List<AuthenticatedUser> superUsers = authenticationServiceBean.findSuperUsers();
                if (superUsers != null && !superUsers.isEmpty()) {
                    superUsers.forEach((au) -> {
                        userNotificationService.sendNotification(au, new Timestamp(new Date().getTime()), UserNotification.Type.CHECKSUMFAIL, dataset.getId());
                    });
                }
                return ok("User notified about checksum validation failure.");
            } else {
                return error(Response.Status.BAD_REQUEST, "Unexpected status cannot be processed: " + statusMessageFromDcm);
            }
        } catch (WrappedResponse ex) {
            return ex.getResponse();
        }
    }

    @POST
    @Path("{id}/submitForReview")
    public Response
            submitForReview(@PathParam("id") String idSupplied) {
        // Submits the dataset for review; the response reports whether the InReview lock is now present.
        try {
            Dataset updatedDataset = execCommand(new SubmitDatasetForReviewCommand(createDataverseRequest(findUserOrDie()), findDatasetOrDie(idSupplied)));
            JsonObjectBuilder result = Json.createObjectBuilder();
            boolean inReview = updatedDataset.isLockedFor(DatasetLock.Reason.InReview);
            result.add("inReview", inReview);
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been submitted for review.");
            return ok(result);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    /**
     * Returns a dataset in review back to its author(s); the JSON body must supply "reasonForReturn".
     */
    @POST
    @Path("{id}/returnToAuthor")
    public Response returnToAuthor(@PathParam("id") String idSupplied, String jsonBody) {
        if (jsonBody == null || jsonBody.isEmpty()) {
            return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn).");
        }
        StringReader rdr = new StringReader(jsonBody);
        JsonObject json = Json.createReader(rdr).readObject();
        try {
            Dataset dataset = findDatasetOrDie(idSupplied);
            String reasonForReturn = null;
            reasonForReturn = json.getString("reasonForReturn");
            // TODO: Once we add a box for the curator to type into, pass the reason for return to the ReturnDatasetToAuthorCommand and delete this check and call to setReturnReason on the API side.
            if (reasonForReturn == null || reasonForReturn.isEmpty()) {
                return error(Response.Status.BAD_REQUEST, "You must enter a reason for returning a dataset to the author(s).");
            }
            AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie();
            Dataset updatedDataset = execCommand(new ReturnDatasetToAuthorCommand(createDataverseRequest(authenticatedUser), dataset, reasonForReturn ));
            JsonObjectBuilder result = Json.createObjectBuilder();
            // NOTE(review): inReview is hard-coded false — presumably the command clears the InReview lock; confirm.
            result.add("inReview", false);
            result.add("message", "Dataset id " + updatedDataset.getId() + " has been sent back to the author(s).");
            return ok(result);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    /**
     * Reports the external curation status label of the latest version.
     * Only exposed for draft versions to users with PublishDataset permission; otherwise 403.
     */
    @GET
    @Path("{id}/curationStatus")
    public Response getCurationStatus(@PathParam("id") String idSupplied) {
        try {
            Dataset ds = findDatasetOrDie(idSupplied);
            DatasetVersion dsv = ds.getLatestVersion();
            if (dsv.isDraft() && permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), ds).has(Permission.PublishDataset)) {
                return response(req -> ok(dsv.getExternalStatusLabel()==null ?
"":dsv.getExternalStatusLabel())); } else { return error(Response.Status.FORBIDDEN, "You are not permitted to view the curation status of this dataset."); } } catch (WrappedResponse wr) { return wr.getResponse(); } } @PUT @Path("{id}/curationStatus") public Response setCurationStatus(@PathParam("id") String idSupplied, @QueryParam("label") String label) { Dataset ds = null; User u = null; try { ds = findDatasetOrDie(idSupplied); u = findUserOrDie(); } catch (WrappedResponse wr) { return wr.getResponse(); } try { execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, label)); return ok("Curation Status updated"); } catch (WrappedResponse wr) { // Just change to Bad Request and send return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build(); } } @DELETE @Path("{id}/curationStatus") public Response deleteCurationStatus(@PathParam("id") String idSupplied) { Dataset ds = null; User u = null; try { ds = findDatasetOrDie(idSupplied); u = findUserOrDie(); } catch (WrappedResponse wr) { return wr.getResponse(); } try { execCommand(new SetCurationStatusCommand(createDataverseRequest(u), ds, null)); return ok("Curation Status deleted"); } catch (WrappedResponse wr) { //Just change to Bad Request and send return Response.fromResponse(wr.getResponse()).status(Response.Status.BAD_REQUEST).build(); } } @GET @Path("{id}/uploadsid") @Deprecated public Response getUploadUrl(@PathParam("id") String idSupplied) { try { Dataset dataset = findDatasetOrDie(idSupplied); boolean canUpdateDataset = false; try { canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset).canIssue(UpdateDatasetVersionCommand.class); } catch (WrappedResponse ex) { logger.info("Exception thrown while trying to figure out permissions while getting upload URL for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage()); throw ex; } if (!canUpdateDataset) { return error(Response.Status.FORBIDDEN, "You are not permitted to 
upload files to this dataset."); } S3AccessIO<?> s3io = FileUtil.getS3AccessForDirectUpload(dataset); if(s3io == null) { return error(Response.Status.NOT_FOUND,"Direct upload not supported for files in this dataset: " + dataset.getId()); } String url = null; String storageIdentifier = null; try { url = s3io.generateTemporaryS3UploadUrl(); storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); } catch (IOException io) { logger.warning(io.getMessage()); throw new WrappedResponse(io, error( Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); } JsonObjectBuilder response = Json.createObjectBuilder() .add("url", url) .add("storageIdentifier", storageIdentifier ); return ok(response); } catch (WrappedResponse wr) { return wr.getResponse(); } } @GET @Path("{id}/uploadurls") public Response getMPUploadUrls(@PathParam("id") String idSupplied, @QueryParam("size") long fileSize) { try { Dataset dataset = findDatasetOrDie(idSupplied); boolean canUpdateDataset = false; try { canUpdateDataset = permissionSvc.requestOn(createDataverseRequest(findUserOrDie()), dataset) .canIssue(UpdateDatasetVersionCommand.class); } catch (WrappedResponse ex) { logger.info( "Exception thrown while trying to figure out permissions while getting upload URLs for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage()); throw ex; } if (!canUpdateDataset) { return error(Response.Status.FORBIDDEN, "You are not permitted to upload files to this dataset."); } S3AccessIO<DataFile> s3io = FileUtil.getS3AccessForDirectUpload(dataset); if (s3io == null) { return error(Response.Status.NOT_FOUND, "Direct upload not supported for files in this dataset: " + dataset.getId()); } JsonObjectBuilder response = null; String storageIdentifier = null; try { storageIdentifier = FileUtil.getStorageIdentifierFromLocation(s3io.getStorageLocation()); response = s3io.generateTemporaryS3UploadUrls(dataset.getGlobalId().asString(), 
storageIdentifier, fileSize); } catch (IOException io) { logger.warning(io.getMessage()); throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not create process direct upload request")); } response.add("storageIdentifier", storageIdentifier); return ok(response); } catch (WrappedResponse wr) { return wr.getResponse(); } } @DELETE @Path("mpupload") public Response abortMPUpload(@QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) { try { Dataset dataset = datasetSvc.findByGlobalId(idSupplied); //Allow the API to be used within a session (e.g. for direct upload in the UI) User user =session.getUser(); if (!user.isAuthenticated()) { try { user = findAuthenticatedUserOrDie(); } catch (WrappedResponse ex) { logger.info( "Exception thrown while trying to figure out permissions while getting aborting upload for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage()); throw ex; } } boolean allowed = false; if (dataset != null) { allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset) .canIssue(UpdateDatasetVersionCommand.class); } else { /* * The only legitimate case where a global id won't correspond to a dataset is * for uploads during creation. Given that this call will still fail unless all * three parameters correspond to an active multipart upload, it should be safe * to allow the attempt for an authenticated user. If there are concerns about * permissions, one could check with the current design that the user is allowed * to create datasets in some dataverse that is configured to use the storage * provider specified in the storageidentifier, but testing for the ability to * create a dataset in a specific dataverse would requiring changing the design * somehow (e.g. adding the ownerId to this call). 
*/ allowed = true; } if (!allowed) { return error(Response.Status.FORBIDDEN, "You are not permitted to abort file uploads with the supplied parameters."); } try { S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId); } catch (IOException io) { logger.warning("Multipart upload abort failed for uploadId: " + uploadId + " storageidentifier=" + storageidentifier + " dataset Id: " + dataset.getId()); logger.warning(io.getMessage()); throw new WrappedResponse(io, error(Response.Status.INTERNAL_SERVER_ERROR, "Could not abort multipart upload")); } return Response.noContent().build(); } catch (WrappedResponse wr) { return wr.getResponse(); } } @PUT @Path("mpupload") public Response completeMPUpload(String partETagBody, @QueryParam("globalid") String idSupplied, @QueryParam("storageidentifier") String storageidentifier, @QueryParam("uploadid") String uploadId) { try { Dataset dataset = datasetSvc.findByGlobalId(idSupplied); //Allow the API to be used within a session (e.g. for direct upload in the UI) User user =session.getUser(); if (!user.isAuthenticated()) { try { user=findAuthenticatedUserOrDie(); } catch (WrappedResponse ex) { logger.info( "Exception thrown while trying to figure out permissions to complete mpupload for dataset id " + dataset.getId() + ": " + ex.getLocalizedMessage()); throw ex; } } boolean allowed = false; if (dataset != null) { allowed = permissionSvc.requestOn(createDataverseRequest(user), dataset) .canIssue(UpdateDatasetVersionCommand.class); } else { /* * The only legitimate case where a global id won't correspond to a dataset is * for uploads during creation. Given that this call will still fail unless all * three parameters correspond to an active multipart upload, it should be safe * to allow the attempt for an authenticated user. 
If there are concerns about * permissions, one could check with the current design that the user is allowed * to create datasets in some dataverse that is configured to use the storage * provider specified in the storageidentifier, but testing for the ability to * create a dataset in a specific dataverse would requiring changing the design * somehow (e.g. adding the ownerId to this call). */ allowed = true; } if (!allowed) { return error(Response.Status.FORBIDDEN, "You are not permitted to complete file uploads with the supplied parameters."); } List<PartETag> eTagList = new ArrayList<PartETag>(); logger.info("Etags: " + partETagBody); try { JsonReader jsonReader = Json.createReader(new StringReader(partETagBody)); JsonObject object = jsonReader.readObject(); jsonReader.close(); for(String partNo : object.keySet()) { eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo))); } for(PartETag et: eTagList) { logger.info("Part: " + et.getPartNumber() + " : " + et.getETag()); } } catch (JsonException je) { logger.info("Unable to parse eTags from: " + partETagBody); throw new WrappedResponse(je, error( Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload")); } try { S3AccessIO.completeMultipartUpload(idSupplied, storageidentifier, uploadId, eTagList); } catch (IOException io) { logger.warning("Multipart upload completion failed for uploadId: " + uploadId +" storageidentifier=" + storageidentifier + " globalId: " + idSupplied); logger.warning(io.getMessage()); try { S3AccessIO.abortMultipartUpload(idSupplied, storageidentifier, uploadId); } catch (IOException e) { logger.severe("Also unable to abort the upload (and release the space on S3 for uploadId: " + uploadId +" storageidentifier=" + storageidentifier + " globalId: " + idSupplied); logger.severe(io.getMessage()); } throw new WrappedResponse(io, error( Response.Status.INTERNAL_SERVER_ERROR, "Could not complete multipart upload")); } return ok("Multipart Upload completed"); 
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    /**
     * Add a File to an existing Dataset
     *
     * @param idSupplied dataset id (database id or persistent id, resolved by findDatasetOrDie)
     * @param jsonData optional file parameters as JSON (multipart "jsonData" part)
     * @param fileInputStream the uploaded file content (multipart "file" part)
     * @param contentDispositionHeader carries the original file name; null when no file part was sent
     * @param formDataBodyPart carries the media type of the file part
     * @return success payload from AddReplaceFileHelper, or an error response
     */
    @POST
    @Path("{id}/add")
    @Consumes(MediaType.MULTIPART_FORM_DATA)
    public Response addFileToDataset(@PathParam("id") String idSupplied,
                    @FormDataParam("jsonData") String jsonData,
                    @FormDataParam("file") InputStream fileInputStream,
                    @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
                    @FormDataParam("file") final FormDataBodyPart formDataBodyPart
                    ){
        if (!systemConfig.isHTTPUpload()) {
            return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled"));
        }
        // -------------------------------------
        // (1) Get the user from the API key
        // -------------------------------------
        User authUser;
        try {
            authUser = findUserOrDie();
        } catch (WrappedResponse ex) {
            return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") );
        }
        // -------------------------------------
        // (2) Get the Dataset Id
        // -------------------------------------
        Dataset dataset;
        try {
            dataset = findDatasetOrDie(idSupplied);
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
        //------------------------------------
        // (2a) Make sure dataset does not have package file
        // (package datasets are DCM-managed; individual adds are not allowed)
        //--------------------------------------
        for (DatasetVersion dv : dataset.getVersions()) {
            if (dv.isHasPackageFile()) {
                return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") );
            }
        }
        // (2a) Load up optional params via JSON
        //---------------------------------------
        OptionalFileParams optionalFileParams = null;
        msgt("(api) jsonData: " + jsonData);
        try {
            optionalFileParams = new OptionalFileParams(jsonData);
        } catch (DataFileTagException ex) {
            return error( Response.Status.BAD_REQUEST, ex.getMessage());
        } catch (ClassCastException |
                        com.google.gson.JsonParseException ex) {
            return error(Response.Status.BAD_REQUEST, BundleUtil.getStringFromBundle("file.addreplace.error.parsing"));
        }
        // -------------------------------------
        // (3) Get the file name and content type
        // -------------------------------------
        String newFilename = null;
        String newFileContentType = null;
        String newStorageIdentifier = null;
        // No multipart file part: the caller must instead point at pre-uploaded storage via
        // storageIdentifier (with optional filename/mimetype) in the JSON params.
        if (null == contentDispositionHeader) {
            if (optionalFileParams.hasStorageIdentifier()) {
                newStorageIdentifier = optionalFileParams.getStorageIdentifier();
                // ToDo - check that storageIdentifier is valid
                if (optionalFileParams.hasFileName()) {
                    newFilename = optionalFileParams.getFileName();
                    if (optionalFileParams.hasMimetype()) {
                        newFileContentType = optionalFileParams.getMimeType();
                    }
                }
            } else {
                return error(BAD_REQUEST, "You must upload a file or provide a storageidentifier, filename, and mimetype.");
            }
        } else {
            newFilename = contentDispositionHeader.getFileName();
            newFileContentType = formDataBodyPart.getMediaType().toString();
        }
        //-------------------
        // (3) Create the AddReplaceFileHelper object
        //-------------------
        msg("ADD!");
        DataverseRequest dvRequest2 = createDataverseRequest(authUser);
        AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper(dvRequest2,
                ingestService,
                datasetService,
                fileService,
                permissionSvc,
                commandEngine,
                systemConfig);
        //-------------------
        // (4) Run "runAddFileByDatasetId"
        //-------------------
        addFileHelper.runAddFileByDataset(dataset,
                newFilename,
                newFileContentType,
                newStorageIdentifier,
                fileInputStream,
                optionalFileParams);
        if (addFileHelper.hasError()){
            return error(addFileHelper.getHttpErrorCode(), addFileHelper.getErrorMessagesAsString("\n"));
        }else{
            String successMsg = BundleUtil.getStringFromBundle("file.addreplace.success.add");
            try {
                //msgt("as String: " + addFileHelper.getSuccessResult());
                /**
                 * @todo We need a consistent, sane way to communicate a human
                 * readable message to an API client suitable for human
                 * consumption.
Imagine if the UI were built in Angular or React * and we want to return a message from the API as-is to the * user. Human readable. */ logger.fine("successMsg: " + successMsg); String duplicateWarning = addFileHelper.getDuplicateFileWarning(); if (duplicateWarning != null && !duplicateWarning.isEmpty()) { return ok(addFileHelper.getDuplicateFileWarning(), addFileHelper.getSuccessResultAsJsonObjectBuilder()); } else { return ok(addFileHelper.getSuccessResultAsJsonObjectBuilder()); } //"Look at that! You added a file! (hey hey, it may have worked)"); } catch (NoFilesException ex) { Logger.getLogger(Files.class.getName()).log(Level.SEVERE, null, ex); return error(Response.Status.BAD_REQUEST, "NoFileException! Serious Error! See administrator!"); } } } // end: addFileToDataset private void msg(String m){ //System.out.println(m); logger.fine(m); } private void dashes(){ msg("----------------"); } private void msgt(String m){ dashes(); msg(m); dashes(); } public static <T> T handleVersion( String versionId, DsVersionHandler<T> hdl ) throws WrappedResponse { switch (versionId) { case ":latest": return hdl.handleLatest(); case ":draft": return hdl.handleDraft(); case ":latest-published": return hdl.handleLatestPublished(); default: try { String[] versions = versionId.split("\\."); switch (versions.length) { case 1: return hdl.handleSpecific(Long.parseLong(versions[0]), (long)0.0); case 2: return hdl.handleSpecific( Long.parseLong(versions[0]), Long.parseLong(versions[1]) ); default: throw new WrappedResponse(error( Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'")); } } catch ( NumberFormatException nfe ) { throw new WrappedResponse( error( Response.Status.BAD_REQUEST, "Illegal version identifier '" + versionId + "'") ); } } } private DatasetVersion getDatasetVersionOrDie( final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { DatasetVersion dsv = execCommand( 
                handleVersion(versionNumber, new DsVersionHandler<Command<DatasetVersion>>(){

                    @Override
                    public Command<DatasetVersion> handleLatest() {
                        return new GetLatestAccessibleDatasetVersionCommand(req, ds);
                    }

                    @Override
                    public Command<DatasetVersion> handleDraft() {
                        return new GetDraftDatasetVersionCommand(req, ds);
                    }

                    @Override
                    public Command<DatasetVersion> handleSpecific(long major, long minor) {
                        return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor);
                    }

                    @Override
                    public Command<DatasetVersion> handleLatestPublished() {
                        return new GetLatestPublishedDatasetVersionCommand(req, ds);
                    }
                }));
        if ( dsv == null || dsv.getId() == null ) {
            throw new WrappedResponse( notFound("Dataset version " + versionNumber + " of dataset " + ds.getId() + " not found") );
        }
        // Log a Make Data Count entry for released versions only.
        if (dsv.isReleased()) {
            MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, ds);
            mdcLogService.logEntry(entry);
        }
        return dsv;
    }

    /**
     * Lists the locks on a dataset, optionally filtered to a single lock type.
     */
    @GET
    @Path("{identifier}/locks")
    public Response getLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
        Dataset dataset = null;
        try {
            dataset = findDatasetOrDie(id);
            Set<DatasetLock> locks;
            if (lockType == null) {
                locks = dataset.getLocks();
            } else {
                // request for a specific type lock:
                DatasetLock lock = dataset.getLockFor(lockType);
                locks = new HashSet<>();
                if (lock != null) {
                    locks.add(lock);
                }
            }
            return ok(locks.stream().map(lock -> json(lock)).collect(toJsonArray()));
        } catch (WrappedResponse wr) {
            return wr.getResponse();
        }
    }

    /**
     * Superuser-only: removes locks (all, or of one type) and reindexes the dataset.
     */
    @DELETE
    @Path("{identifier}/locks")
    public Response deleteLocks(@PathParam("identifier") String id, @QueryParam("type") DatasetLock.Reason lockType) {
        return response(req -> {
            try {
                AuthenticatedUser user = findAuthenticatedUserOrDie();
                if (!user.isSuperuser()) {
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
                }
                Dataset dataset = findDatasetOrDie(id);
                if (lockType == null) {
                    Set<DatasetLock.Reason> locks
                            = new HashSet<>();
                    for (DatasetLock lock : dataset.getLocks()) {
                        locks.add(lock.getReason());
                    }
                    if (!locks.isEmpty()) {
                        for (DatasetLock.Reason locktype : locks) {
                            execCommand(new RemoveLockCommand(req, dataset, locktype));
                            // refresh the dataset:
                            dataset = findDatasetOrDie(id);
                        }
                        // kick of dataset reindexing, in case the locks removed
                        // affected the search card:
                        try {
                            indexService.indexDataset(dataset, true);
                        } catch (IOException | SolrServerException e) {
                            // Indexing failures are logged for a later manual re-index, not surfaced to the caller.
                            String failureLogText = "Post lock removal indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
                            failureLogText += "\r\n" + e.getLocalizedMessage();
                            LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);
                        }
                        return ok("locks removed");
                    }
                    return ok("dataset not locked");
                }
                // request for a specific type lock:
                DatasetLock lock = dataset.getLockFor(lockType);
                if (lock != null) {
                    execCommand(new RemoveLockCommand(req, dataset, lock.getReason()));
                    // refresh the dataset:
                    dataset = findDatasetOrDie(id);
                    // ... and kick of dataset reindexing, in case the lock removed
                    // affected the search card:
                    try {
                        indexService.indexDataset(dataset, true);
                    } catch (IOException | SolrServerException e) {
                        String failureLogText = "Post lock removal indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
                        failureLogText += "\r\n" + e.getLocalizedMessage();
                        LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);
                    }
                    return ok("lock type " + lock.getReason() + " removed");
                }
                return ok("no lock type " + lockType + " on the dataset");
            } catch (WrappedResponse wr) {
                return wr.getResponse();
            }
        });
    }

    /**
     * Superuser-only: adds a lock of the given type to the dataset, then reindexes it.
     */
    @POST
    @Path("{identifier}/lock/{type}")
    public Response lockDataset(@PathParam("identifier") String id, @PathParam("type") DatasetLock.Reason lockType) {
        return response(req -> {
            try {
                AuthenticatedUser user = findAuthenticatedUserOrDie();
                if (!user.isSuperuser()) {
                    return error(Response.Status.FORBIDDEN, "This API end point can be used by superusers only.");
                }
                Dataset dataset = findDatasetOrDie(id);
                DatasetLock lock = dataset.getLockFor(lockType);
                if (lock != null) {
                    return error(Response.Status.FORBIDDEN, "dataset already locked with lock type " + lockType);
                }
                lock = new DatasetLock(lockType, user);
                execCommand(new AddLockCommand(req, dataset, lock));
                // refresh the dataset:
                dataset = findDatasetOrDie(id);
                // ... and kick of dataset reindexing:
                try {
                    indexService.indexDataset(dataset, true);
                } catch (IOException | SolrServerException e) {
                    String failureLogText = "Post add lock indexing failed. You can kickoff a re-index of this dataset with: \r\n curl http://localhost:8080/api/admin/index/datasets/" + dataset.getId().toString();
                    failureLogText += "\r\n" + e.getLocalizedMessage();
                    LoggingUtil.writeOnSuccessFailureLog(null, failureLogText, dataset);
                }
                return ok("dataset locked with lock type " + lockType);
            } catch (WrappedResponse wr) {
                return wr.getResponse();
            }
        });
    }

    /**
     * Lists Make Data Count external citations recorded for the dataset.
     */
    @GET
    @Path("{id}/makeDataCount/citations")
    public Response getMakeDataCountCitations(@PathParam("id") String idSupplied) {
        try {
            Dataset dataset = findDatasetOrDie(idSupplied);
            JsonArrayBuilder datasetsCitations = Json.createArrayBuilder();
            List<DatasetExternalCitations> externalCitations = datasetExternalCitationsService.getDatasetExternalCitationsByDataset(dataset);
            for (DatasetExternalCitations citation : externalCitations ){
                JsonObjectBuilder candidateObj = Json.createObjectBuilder();
                /**
                 * In the future we can imagine storing and presenting more
                 * information about the citation such as the title of the paper
                 * and the names of the authors. For now, we'll at least give
                 * the URL of the citation so people can click and find out more
                 * about the citation.
*/ candidateObj.add("citationUrl", citation.getCitedByUrl()); datasetsCitations.add(candidateObj); } return ok(datasetsCitations); } catch (WrappedResponse wr) { return wr.getResponse(); } } @GET @Path("{id}/makeDataCount/{metric}") public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @QueryParam("country") String country) { String nullCurrentMonth = null; return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country); } @GET @Path("{identifier}/storagesize") public Response getStorageSize(@PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"), execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached,GetDatasetStorageSizeCommand.Mode.STORAGE, null))))); } @GET @Path("{identifier}/versions/{versionId}/downloadsize") public Response getDownloadSize(@PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version , findDatasetOrDie(dvIdtf), uriInfo, headers)))))); } @GET @Path("{id}/makeDataCount/{metric}/{yyyymm}") public Response getMakeDataCountMetric(@PathParam("id") String idSupplied, @PathParam("metric") String metricSupplied, @PathParam("yyyymm") String yyyymm, @QueryParam("country") String country) { try { Dataset dataset = findDatasetOrDie(idSupplied); NullSafeJsonBuilder jsonObjectBuilder = jsonObjectBuilder(); 
        // Parse the requested metric name into a known Make Data Count metric type;
        // an unknown name is a client error.
        MakeDataCountUtil.MetricType metricType = null;
        try {
            metricType = MakeDataCountUtil.MetricType.fromString(metricSupplied);
        } catch (IllegalArgumentException ex) {
            return error(Response.Status.BAD_REQUEST, ex.getMessage());
        }
        String monthYear = null;
        if (yyyymm != null) {
            // We add "-01" because we store "2018-05-01" rather than "2018-05" in the "monthyear" column.
            // Dates come to us as "2018-05-01" in the SUSHI JSON ("begin-date") and we decided to store them as-is.
            monthYear = MetricsUtil.sanitizeYearMonthUserInput(yyyymm) + "-01";
        }
        if (country != null) {
            // Country codes are stored lower-case.
            country = country.toLowerCase();
            // NOTE(review): the user-facing message says "ISO 1366"; country codes are ISO 3166 —
            // looks like a typo, left unchanged here because fixing it changes API output.
            if (!MakeDataCountUtil.isValidCountryCode(country)) {
                return error(Response.Status.BAD_REQUEST, "Country must be one of the ISO 1366 Country Codes");
            }
        }
        DatasetMetrics datasetMetrics = datasetMetricsSvc.getDatasetMetricsByDatasetForDisplay(dataset, monthYear, country);
        // Either no row at all, or a row with zero total activity, reads as "no metrics available".
        if (datasetMetrics == null) {
            return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
        } else if (datasetMetrics.getDownloadsTotal() + datasetMetrics.getViewsTotal() == 0) {
            return ok("No metrics available for dataset " + dataset.getId() + " for " + yyyymm + " for country code " + country + ".");
        }
        // Only the one requested metric is populated below; the others stay null and are
        // presumably emitted as JSON nulls by the NullSafeJsonBuilder built above — verify.
        Long viewsTotalRegular = null;
        Long viewsUniqueRegular = null;
        Long downloadsTotalRegular = null;
        Long downloadsUniqueRegular = null;
        Long viewsTotalMachine = null;
        Long viewsUniqueMachine = null;
        Long downloadsTotalMachine = null;
        Long downloadsUniqueMachine = null;
        Long viewsTotal = null;
        Long viewsUnique = null;
        Long downloadsTotal = null;
        Long downloadsUnique = null;
        // NOTE(review): this switches on the raw string rather than the parsed metricType;
        // within this span metricType is only used to validate the input.
        switch (metricSupplied) {
            case "viewsTotal":
                viewsTotal = datasetMetrics.getViewsTotal();
                break;
            case "viewsTotalRegular":
                viewsTotalRegular = datasetMetrics.getViewsTotalRegular();
                break;
            case "viewsTotalMachine":
                viewsTotalMachine = datasetMetrics.getViewsTotalMachine();
                break;
            case "viewsUnique":
                viewsUnique = datasetMetrics.getViewsUnique();
                break;
            case "viewsUniqueRegular":
viewsUniqueRegular = datasetMetrics.getViewsUniqueRegular(); break; case "viewsUniqueMachine": viewsUniqueMachine = datasetMetrics.getViewsUniqueMachine(); break; case "downloadsTotal": downloadsTotal = datasetMetrics.getDownloadsTotal(); break; case "downloadsTotalRegular": downloadsTotalRegular = datasetMetrics.getDownloadsTotalRegular(); break; case "downloadsTotalMachine": downloadsTotalMachine = datasetMetrics.getDownloadsTotalMachine(); break; case "downloadsUnique": downloadsUnique = datasetMetrics.getDownloadsUnique(); break; case "downloadsUniqueRegular": downloadsUniqueRegular = datasetMetrics.getDownloadsUniqueRegular(); break; case "downloadsUniqueMachine": downloadsUniqueMachine = datasetMetrics.getDownloadsUniqueMachine(); break; default: break; } /** * TODO: Think more about the JSON output and the API design. * getDatasetMetricsByDatasetMonthCountry returns a single row right * now, by country. We could return multiple metrics (viewsTotal, * viewsUnique, downloadsTotal, and downloadsUnique) by country. 
*/ jsonObjectBuilder.add("viewsTotalRegular", viewsTotalRegular); jsonObjectBuilder.add("viewsUniqueRegular", viewsUniqueRegular); jsonObjectBuilder.add("downloadsTotalRegular", downloadsTotalRegular); jsonObjectBuilder.add("downloadsUniqueRegular", downloadsUniqueRegular); jsonObjectBuilder.add("viewsTotalMachine", viewsTotalMachine); jsonObjectBuilder.add("viewsUniqueMachine", viewsUniqueMachine); jsonObjectBuilder.add("downloadsTotalMachine", downloadsTotalMachine); jsonObjectBuilder.add("downloadsUniqueMachine", downloadsUniqueMachine); jsonObjectBuilder.add("viewsTotal", viewsTotal); jsonObjectBuilder.add("viewsUnique", viewsUnique); jsonObjectBuilder.add("downloadsTotal", downloadsTotal); jsonObjectBuilder.add("downloadsUnique", downloadsUnique); return ok(jsonObjectBuilder); } catch (WrappedResponse wr) { return wr.getResponse(); } catch (Exception e) { //bad date - caught in sanitize call return error(BAD_REQUEST, e.getMessage()); } } @GET @Path("{identifier}/storageDriver") public Response getFileStore(@PathParam("identifier") String dvIdtf, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { Dataset dataset; try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } return response(req -> ok(dataset.getEffectiveStorageDriverId())); } @PUT @Path("{identifier}/storageDriver") public Response setFileStore(@PathParam("identifier") String dvIdtf, String storageDriverLabel, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { // Superuser-only: AuthenticatedUser user; try { user = findAuthenticatedUserOrDie(); } catch (WrappedResponse ex) { return error(Response.Status.BAD_REQUEST, "Authentication is required."); } if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } Dataset dataset; try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, 
"No such dataset"); } // We don't want to allow setting this to a store id that does not exist: for (Entry<String, String> store : DataAccess.getStorageDriverLabels().entrySet()) { if (store.getKey().equals(storageDriverLabel)) { dataset.setStorageDriverId(store.getValue()); datasetService.merge(dataset); return ok("Storage driver set to: " + store.getKey() + "/" + store.getValue()); } } return error(Response.Status.BAD_REQUEST, "No Storage Driver found for : " + storageDriverLabel); } @DELETE @Path("{identifier}/storageDriver") public Response resetFileStore(@PathParam("identifier") String dvIdtf, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { // Superuser-only: AuthenticatedUser user; try { user = findAuthenticatedUserOrDie(); } catch (WrappedResponse ex) { return error(Response.Status.BAD_REQUEST, "Authentication is required."); } if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } Dataset dataset; try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } dataset.setStorageDriverId(null); datasetService.merge(dataset); return ok("Storage reset to default: " + DataAccess.DEFAULT_STORAGE_DRIVER_IDENTIFIER); } @GET @Path("{identifier}/curationLabelSet") public Response getCurationLabelSet(@PathParam("identifier") String dvIdtf, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { try { AuthenticatedUser user = findAuthenticatedUserOrDie(); if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } } catch (WrappedResponse wr) { return wr.getResponse(); } Dataset dataset; try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return ex.getResponse(); } return response(req -> ok(dataset.getEffectiveCurationLabelSetName())); } @PUT @Path("{identifier}/curationLabelSet") public Response setCurationLabelSet(@PathParam("identifier") String dvIdtf, 
@QueryParam("name") String curationLabelSet, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { // Superuser-only: AuthenticatedUser user; try { user = findAuthenticatedUserOrDie(); } catch (WrappedResponse ex) { return error(Response.Status.UNAUTHORIZED, "Authentication is required."); } if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } Dataset dataset; try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return ex.getResponse(); } if (SystemConfig.CURATIONLABELSDISABLED.equals(curationLabelSet) || SystemConfig.DEFAULTCURATIONLABELSET.equals(curationLabelSet)) { dataset.setCurationLabelSetName(curationLabelSet); datasetService.merge(dataset); return ok("Curation Label Set Name set to: " + curationLabelSet); } else { for (String setName : systemConfig.getCurationLabels().keySet()) { if (setName.equals(curationLabelSet)) { dataset.setCurationLabelSetName(curationLabelSet); datasetService.merge(dataset); return ok("Curation Label Set Name set to: " + setName); } } } return error(Response.Status.BAD_REQUEST, "No Such Curation Label Set"); } @DELETE @Path("{identifier}/curationLabelSet") public Response resetCurationLabelSet(@PathParam("identifier") String dvIdtf, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { // Superuser-only: AuthenticatedUser user; try { user = findAuthenticatedUserOrDie(); } catch (WrappedResponse ex) { return error(Response.Status.BAD_REQUEST, "Authentication is required."); } if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } Dataset dataset; try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return ex.getResponse(); } dataset.setCurationLabelSetName(SystemConfig.DEFAULTCURATIONLABELSET); datasetService.merge(dataset); return ok("Curation Label Set reset to default: " + SystemConfig.DEFAULTCURATIONLABELSET); } @GET @Path("{identifier}/allowedCurationLabels") 
public Response getAllowedCurationLabels(@PathParam("identifier") String dvIdtf, @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { AuthenticatedUser user = null; try { user = findAuthenticatedUserOrDie(); } catch (WrappedResponse wr) { return wr.getResponse(); } Dataset dataset; try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { return ex.getResponse(); } if (permissionSvc.requestOn(createDataverseRequest(user), dataset).has(Permission.PublishDataset)) { String[] labelArray = systemConfig.getCurationLabels().get(dataset.getEffectiveCurationLabelSetName()); return response(req -> ok(String.join(",", labelArray))); } else { return error(Response.Status.FORBIDDEN, "You are not permitted to view the allowed curation labels for this dataset."); } } @GET @Path("{identifier}/timestamps") @Produces(MediaType.APPLICATION_JSON) public Response getTimestamps(@PathParam("identifier") String id) { Dataset dataset = null; DateTimeFormatter formatter = DateTimeFormatter.ISO_LOCAL_DATE_TIME; try { dataset = findDatasetOrDie(id); User u = findUserOrDie(); Set<Permission> perms = new HashSet<Permission>(); perms.add(Permission.ViewUnpublishedDataset); boolean canSeeDraft = permissionSvc.hasPermissionsFor(u, dataset, perms); JsonObjectBuilder timestamps = Json.createObjectBuilder(); logger.fine("CSD: " + canSeeDraft); logger.fine("IT: " + dataset.getIndexTime()); logger.fine("MT: " + dataset.getModificationTime()); logger.fine("PIT: " + dataset.getPermissionIndexTime()); logger.fine("PMT: " + dataset.getPermissionModificationTime()); // Basic info if it's released if (dataset.isReleased() || canSeeDraft) { timestamps.add("createTime", formatter.format(dataset.getCreateDate().toLocalDateTime())); if (dataset.getPublicationDate() != null) { timestamps.add("publicationTime", formatter.format(dataset.getPublicationDate().toLocalDateTime())); } if (dataset.getLastExportTime() != null) { timestamps.add("lastMetadataExportTime", 
formatter.format(dataset.getLastExportTime().toInstant().atZone(ZoneId.systemDefault()))); } if (dataset.getMostRecentMajorVersionReleaseDate() != null) { timestamps.add("lastMajorVersionReleaseTime", formatter.format( dataset.getMostRecentMajorVersionReleaseDate().toInstant().atZone(ZoneId.systemDefault()))); } // If the modification/permissionmodification time is // set and the index time is null or is before the mod time, the relevant index is stale timestamps.add("hasStaleIndex", (dataset.getModificationTime() != null && (dataset.getIndexTime() == null || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true : false); timestamps.add("hasStalePermissionIndex", (dataset.getPermissionModificationTime() != null && (dataset.getIndexTime() == null || (dataset.getIndexTime().compareTo(dataset.getModificationTime()) <= 0))) ? true : false); } // More detail if you can see a draft if (canSeeDraft) { timestamps.add("lastUpdateTime", formatter.format(dataset.getModificationTime().toLocalDateTime())); if (dataset.getIndexTime() != null) { timestamps.add("lastIndexTime", formatter.format(dataset.getIndexTime().toLocalDateTime())); } if (dataset.getPermissionModificationTime() != null) { timestamps.add("lastPermissionUpdateTime", formatter.format(dataset.getPermissionModificationTime().toLocalDateTime())); } if (dataset.getPermissionIndexTime() != null) { timestamps.add("lastPermissionIndexTime", formatter.format(dataset.getPermissionIndexTime().toLocalDateTime())); } if (dataset.getGlobalIdCreateTime() != null) { timestamps.add("globalIdCreateTime", formatter .format(dataset.getGlobalIdCreateTime().toInstant().atZone(ZoneId.systemDefault()))); } } return ok(timestamps); } catch (WrappedResponse wr) { return wr.getResponse(); } } /** * Add multiple Files to an existing Dataset * * @param idSupplied * @param jsonData * @return */ @POST @Path("{id}/addFiles") @Consumes(MediaType.MULTIPART_FORM_DATA) public Response addFilesToDataset(@PathParam("id") 
String idSupplied, @FormDataParam("jsonData") String jsonData) { if (!systemConfig.isHTTPUpload()) { return error(Response.Status.SERVICE_UNAVAILABLE, BundleUtil.getStringFromBundle("file.api.httpDisabled")); } // ------------------------------------- // (1) Get the user from the API key // ------------------------------------- User authUser; try { authUser = findUserOrDie(); } catch (WrappedResponse ex) { return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.addreplace.error.auth") ); } // ------------------------------------- // (2) Get the Dataset Id // ------------------------------------- Dataset dataset; try { dataset = findDatasetOrDie(idSupplied); } catch (WrappedResponse wr) { return wr.getResponse(); } //------------------------------------ // (2a) Make sure dataset does not have package file // -------------------------------------- for (DatasetVersion dv : dataset.getVersions()) { if (dv.isHasPackageFile()) { return error(Response.Status.FORBIDDEN, BundleUtil.getStringFromBundle("file.api.alreadyHasPackageFile") ); } } DataverseRequest dvRequest = createDataverseRequest(authUser); AddReplaceFileHelper addFileHelper = new AddReplaceFileHelper( dvRequest, this.ingestService, this.datasetService, this.fileService, this.permissionSvc, this.commandEngine, this.systemConfig ); return addFileHelper.addFiles(jsonData, dataset, authUser); } /** * API to find curation assignments and statuses * * @return * @throws WrappedResponse */ @GET @Path("/listCurationStates") @Produces("text/csv") public Response getCurationStates() throws WrappedResponse { try { AuthenticatedUser user = findAuthenticatedUserOrDie(); if (!user.isSuperuser()) { return error(Response.Status.FORBIDDEN, "Superusers only."); } } catch (WrappedResponse wr) { return wr.getResponse(); } List<DataverseRole> allRoles = dataverseRoleService.findAll(); List<DataverseRole> curationRoles = new ArrayList<DataverseRole>(); allRoles.forEach(r -> { if 
(r.permissions().contains(Permission.PublishDataset)) curationRoles.add(r); }); HashMap<String, HashSet<String>> assignees = new HashMap<String, HashSet<String>>(); curationRoles.forEach(r -> { assignees.put(r.getAlias(), null); }); StringBuilder csvSB = new StringBuilder(String.join(",", BundleUtil.getStringFromBundle("dataset"), BundleUtil.getStringFromBundle("datasets.api.creationdate"), BundleUtil.getStringFromBundle("datasets.api.modificationdate"), BundleUtil.getStringFromBundle("datasets.api.curationstatus"), String.join(",", assignees.keySet()))); for (Dataset dataset : datasetSvc.findAllUnpublished()) { List<RoleAssignment> ras = permissionService.assignmentsOn(dataset); curationRoles.forEach(r -> { assignees.put(r.getAlias(), new HashSet<String>()); }); for (RoleAssignment ra : ras) { if (curationRoles.contains(ra.getRole())) { assignees.get(ra.getRole().getAlias()).add(ra.getAssigneeIdentifier()); } } String name = "\"" + dataset.getCurrentName().replace("\"", "\"\"") + "\""; String status = dataset.getLatestVersion().getExternalStatusLabel(); String url = systemConfig.getDataverseSiteUrl() + dataset.getTargetUrl() + dataset.getGlobalId().asString(); String date = new SimpleDateFormat("yyyy-MM-dd").format(dataset.getCreateDate()); String modDate = new SimpleDateFormat("yyyy-MM-dd").format(dataset.getModificationTime()); String hyperlink = "\"=HYPERLINK(\"\"" + url + "\"\",\"\"" + name + "\"\")\""; List<String> sList = new ArrayList<String>(); assignees.entrySet().forEach(e -> sList.add(e.getValue().size() == 0 ? "" : String.join(";", e.getValue()))); csvSB.append("\n").append(String.join(",", hyperlink, date, modDate, status == null ? "" : status, String.join(",", sList))); } csvSB.append("\n"); return ok(csvSB.toString(), MediaType.valueOf(FileUtil.MIME_TYPE_CSV), "datasets.status.csv"); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.ververica.flink.table.gateway.source.random; import org.apache.flink.table.descriptors.ConnectorDescriptorValidator; import org.apache.flink.table.descriptors.DescriptorProperties; /** * Validator for {@link RandomSource}. */ public class RandomSourceValidator extends ConnectorDescriptorValidator { public static final String CONNECTOR_TYPE_VALUE = "random"; public static final String RANDOM_LIMIT = "random.limit"; // this default value indicates that the random source is an unbounded source public static final int RANDOM_LIMIT_DEFAULT_VALUE = 0; @Override public void validate(DescriptorProperties properties) { super.validate(properties); properties.validateValue(CONNECTOR_TYPE, CONNECTOR_TYPE_VALUE, false); properties.validateInt(RANDOM_LIMIT, true, 1); } }
package algorithm.leetcode.problem.P707; import java.util.ArrayList; import java.util.List; /** * 707. Design Linked List * Easy * 165 * 49 * <p> * <p> * Design your implementation of the linked list. You can choose to use the singly linked list or the doubly linked list. A node in a singly linked list should have two attributes: val and next. val is the value of the current node, and next is a pointer/reference to the next node. If you want to use the doubly linked list, you will need one more attribute prev to indicate the previous node in the linked list. Assume all nodes in the linked list are 0-indexed. * <p> * Implement these functions in your linked list class: * <p> * get(index) : Get the value of the index-th node in the linked list. If the index is invalid, return -1. * addAtHead(val) : Add a node of value val before the first element of the linked list. After the insertion, the new node will be the first node of the linked list. * addAtTail(val) : Append a node of value val to the last element of the linked list. * addAtIndex(index, val) : Add a node of value val before the index-th node in the linked list. If index equals to the length of linked list, the node will be appended to the end of linked list. If index is greater than the length, the node will not be inserted. * deleteAtIndex(index) : Delete the index-th node in the linked list, if the index is valid. * Example: * <p> * MyLinkedList linkedList = new MyLinkedList(); * linkedList.addAtHead(1); * linkedList.addAtTail(3); * linkedList.addAtIndex(1, 2); // linked list becomes 1->2->3 * linkedList.get(1); // returns 2 * linkedList.deleteAtIndex(1); // now the linked list is 1->3 * linkedList.get(1); // returns 3 * Note: * <p> * All values will be in the range of [1, 1000]. * The number of operations will be in the range of [1, 1000]. * Please do not use the built-in LinkedList library. 
* Accepted * 12,007 * Submissions * 60,873 */ public class MyLinkedList { private Node header; private Node tail; private static int count; class Node { private int value; private Node previous; private Node next; public Node(int value) { this.value = value; } public Node getPrevious() { return previous; } public void setPrevious(Node previous) { this.previous = previous; } public Node getNext() { return next; } public void setNext(Node next) { this.next = next; } } /** * Initialize your data structure here. */ public MyLinkedList() { this.header = null; this.tail = null; this.count = 0; } /** * Get the value of the index-th node in the linked list. If the index is invalid, return -1. */ public int get(int index) { if (index < 0 || index >= count) { return -1; } Node result = header; for (int i = 0; i < index; i++) { if (result != null) { result = result.next; } } return result != null ? result.value : -1; } /** * Add a node of value val before the first element of the linked list. After the insertion, the new node will be the first node of the linked list. */ public void addAtHead(int val) { Node temp = this.header; Node insert = new Node(val); if (temp == null) { this.tail = insert; this.header = insert; } else { insert.next = temp.next; temp.previous = insert; this.header = insert; } count++; } /** * Append a node of value val to the last element of the linked list. */ public void addAtTail(int val) { Node temp = this.tail; Node insert = new Node(val); if (temp == null) { this.tail = insert; this.header = insert; } else { temp.next = insert; insert.previous = temp; this.tail = insert; } count++; } /** * Add a node of value val before the index-th node in the linked list. If index equals to the length of linked list, the node will be appended to the end of linked list. If index is greater than the length, the node will not be inserted. 
*/ public void addAtIndex(int index, int val) { Node result = header; for (int i = 0; i < index; i++) { if (result != null) { result = result.next; } } if (result == null) { addAtTail(val); return; } Node insert = new Node(val); if (header == null) { this.header = insert; this.tail = insert; } else { insert.next = result; insert.previous = result.previous; if (result.previous == null) { result.previous = insert; this.header = insert; } else { result.previous.next = insert; result.previous = insert; } } count++; } /** * Delete the index-th node in the linked list, if the index is valid. */ public void deleteAtIndex(int index) { if (index < 0 || index >= count) { return; } Node result = header; for (int i = 0; i < index; i++) { if (result != null) { result = result.next; } } if (result == null) { return; } if (result.next == null) { if (result.previous == null) { result = null; this.header = null; this.tail = null; } else { result.previous.next = null; this.tail = result.previous; } } else { if (result.previous == null) { result.next.previous = null; this.header = result.next; } else { result.previous.next = result.next; result.next.previous = result.previous; } } count--; } } /** * Your MyLinkedList object will be instantiated and called as such: * MyLinkedList obj = new MyLinkedList(); * int param_1 = obj.get(index); * obj.addAtHead(val); * obj.addAtTail(val); * obj.addAtIndex(index,val); * obj.deleteAtIndex(index); */
/**
    Copyright 2017 Andrea "Stock" Stocchero

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

	    http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
*/
package org.pepstock.charba.client.geo;

import java.util.Collections;
import java.util.List;

import org.pepstock.charba.client.commons.ArrayListHelper;
import org.pepstock.charba.client.commons.ArrayObject;
import org.pepstock.charba.client.commons.ArrayObjectContainerList;
import org.pepstock.charba.client.commons.Key;
import org.pepstock.charba.client.commons.NativeObject;
import org.pepstock.charba.client.commons.NativeObjectContainerFactory;
import org.pepstock.charba.client.geo.enums.ClipMap;

/**
 * Options handler managing the "data" property of data sets for GEO charts,
 * where the values are GEO data points instead of plain numbers.
 * 
 * @author Andrea "Stock" Stocchero
 * @param <T> type of data point for the specific chart
 */
final class GeoDatasetHandler<T extends GeoDataPoint> extends CommonOptionsHandler {

	// exception string message for setting data
	static final String INVALID_SET_DATA_CALL = "'setData' method is not invokable by a GEO chart. Use 'setValues' method";
	// exception string message for getting data
	static final String INVALID_GET_DATA_CALL = "'getData' method is not invokable by a GEO chart. Use 'getValues' method";

	/**
	 * Name of properties of native object.
	 */
	private enum Property implements Key
	{
		DATA("data");

		// name value of property
		private final String value;

		/**
		 * Creates with the property value to use in the native object.
		 * 
		 * @param value value of property name
		 */
		private Property(String value) {
			this.value = value;
		}

		/*
		 * (non-Javadoc)
		 * 
		 * @see org.pepstock.charba.client.commons.Key#value()
		 */
		@Override
		public String value() {
			return value;
		}
	}

	// factory used to map native array items to GEO data points
	private final NativeObjectContainerFactory<T> factory;

	/**
	 * Creates a data set handler.
	 * 
	 * @param nativeObject native object of data set
	 * @param defaultClipMap default clip map instance, different between choropleth and bubblemap.
	 * @param factory factory instance to retrieve the data points
	 */
	GeoDatasetHandler(NativeObject nativeObject, ClipMap defaultClipMap, NativeObjectContainerFactory<T> factory) {
		super(nativeObject, defaultClipMap);
		// stores factory
		this.factory = factory;
	}

	/**
	 * Sets the data property of a data set as an array of GEO data points, specific for the chart type.
	 * 
	 * @param dataPoints an array of GEO data point, specific for the chart type.
	 */
	void setValues(T[] dataPoints) {
		setArrayValue(Property.DATA, ArrayObject.fromOrNull(dataPoints));
	}

	/**
	 * Sets the data property of a data set as a list of GEO data points, specific for the chart type.
	 * 
	 * @param dataPoints a list of GEO data point, specific for the chart type.
	 */
	void setValues(List<T> dataPoints) {
		setArrayValue(Property.DATA, ArrayObject.fromOrNull(dataPoints));
	}

	/**
	 * Returns the data property of a data set as a list of GEO data points, specific for the chart type.
	 * 
	 * @param binding if <code>true</code> binds the new array list in the container
	 * @return list of GEO data point, specific for the chart type.
	 */
	List<T> getValues(boolean binding) {
		// when the data property is already stored, always returns its content
		if (has(Property.DATA)) {
			// reads the native array
			ArrayObject stored = getArrayValue(Property.DATA);
			// wraps and returns it
			return ArrayListHelper.list(stored, factory);
		}
		// no data stored: optionally creates an empty list and binds it in the container
		if (binding) {
			ArrayObjectContainerList<T> bound = new ArrayObjectContainerList<>();
			// binds the new empty array
			setArrayValue(Property.DATA, ArrayObject.fromOrEmpty(bound));
			// returns the bound list
			return bound;
		}
		// nothing stored and no binding requested
		return Collections.emptyList();
	}
}
/*
 * Entagged Audio Tag library
 * Copyright (c) 2003-2005 Raphaël Slinckx <raphael@slinckx.net>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
 */
package org.jaudiotagger.audio.flac;

import org.jaudiotagger.audio.exceptions.CannotWriteException;
import org.jaudiotagger.audio.generic.AudioFileWriter2;
import org.jaudiotagger.tag.Tag;

import java.nio.channels.FileChannel;

/**
 * Write/delete tag info for Flac file (opensource lossless encoding).
 *
 * All work is delegated to a {@link FlacTagWriter}.
 */
public class FlacFileWriter extends AudioFileWriter2
{
    // Delegate doing the actual metadata-block writing.
    // FIX (idiom): made final — it is assigned once at construction and never replaced.
    private final FlacTagWriter tw = new FlacTagWriter();

    /**
     * Writes the given tag into the FLAC file open on {@code channel}.
     *
     * @param tag      tag data to persist
     * @param channel  channel of the FLAC file being modified
     * @param fileName name of the file, used by the delegate (presumably for error
     *                 reporting — confirm in FlacTagWriter)
     * @throws CannotWriteException if the delegate fails to write the tag
     */
    @Override
    protected void writeTag(Tag tag, FileChannel channel, final String fileName) throws CannotWriteException
    {
        tw.write(tag, channel, fileName);
    }

    /**
     * Deletes the tag data from the FLAC file open on {@code channel}.
     *
     * @param tag      tag instance describing what to delete
     * @param channel  channel of the FLAC file being modified
     * @param fileName name of the file, passed through to the delegate
     * @throws CannotWriteException if the delegate fails to delete the tag
     */
    @Override
    protected void deleteTag(Tag tag, FileChannel channel, final String fileName) throws CannotWriteException
    {
        tw.delete(tag, channel, fileName);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.cxf.jaxrs;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;

import javax.servlet.ServletRequest;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Request;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;

import org.apache.camel.Exchange;
import org.apache.camel.LoggingLevel;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.builder.NoErrorHandlerBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.cxf.CXFTestSupport;
import org.apache.camel.component.cxf.common.message.CxfConstants;
import org.apache.camel.component.cxf.jaxrs.testbean.Customer;
import org.apache.camel.component.cxf.jaxrs.testbean.CustomerService;
import org.apache.camel.spi.Registry;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.util.EntityUtils;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;

/**
 * Tests the cxfrs consumer against several endpoint configurations: resource
 * interface vs. implementation class, model-file based resources (with and
 * without annotations / default handler), context propagation, and direct
 * service-bean invocation.
 */
public class CxfRsConsumerTest extends CamelTestSupport {

    private static final String PUT_REQUEST = "<Customer><name>Mary</name><id>123</id></Customer>";
    /** Host-relative context root shared by all endpoints in this test. */
    private static final String CXT = CXFTestSupport.getPort1() + "/CxfRsConsumerTest";

    // START SNIPPET: example
    private static final String CXF_RS_ENDPOINT_URI
            = "cxfrs://http://localhost:" + CXT
              + "/rest?resourceClasses=org.apache.camel.component.cxf.jaxrs.testbean.CustomerServiceResource";
    private static final String CXF_RS_ENDPOINT_URI2
            = "cxfrs://http://localhost:" + CXT
              + "/rest2?resourceClasses=org.apache.camel.component.cxf.jaxrs.testbean.CustomerService";
    private static final String CXF_RS_ENDPOINT_URI3
            = "cxfrs://http://localhost:" + CXT + "/rest3?"
              + "resourceClasses=org.apache.camel.component.cxf.jaxrs.testbean.CustomerServiceNoAnnotations&"
              + "modelRef=classpath:/org/apache/camel/component/cxf/jaxrs/CustomerServiceModel.xml";
    private static final String CXF_RS_ENDPOINT_URI4
            = "cxfrs://http://localhost:" + CXT + "/rest4?"
              + "modelRef=classpath:/org/apache/camel/component/cxf/jaxrs/CustomerServiceDefaultHandlerModel.xml";
    private static final String CXF_RS_ENDPOINT_URI5
            = "cxfrs://http://localhost:" + CXT + "/rest5?"
              + "propagateContexts=true&"
              + "modelRef=classpath:/org/apache/camel/component/cxf/jaxrs/CustomerServiceDefaultHandlerModel.xml";
    private static final String CXF_RS_ENDPOINT_URI6
            = "cxfrs://http://localhost:" + CXT + "/rest6?"
              + "performInvocation=true&serviceBeans=#myServiceBean";

    @Override
    protected void bindToRegistry(Registry registry) throws Exception {
        // rest6 invokes this bean directly instead of going through a processor
        registry.bind("myServiceBean", new CustomerService());
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        final Processor testProcessor = new TestProcessor();
        final Processor testProcessor2 = new TestProcessor2();
        final Processor testProcessor3 = new TestProcessor3();
        return new RouteBuilder() {
            public void configure() {
                // let exceptions from the processors propagate back to the client
                errorHandler(new NoErrorHandlerBuilder());
                from(CXF_RS_ENDPOINT_URI).process(testProcessor);
                from(CXF_RS_ENDPOINT_URI2).process(testProcessor);
                from(CXF_RS_ENDPOINT_URI3).process(testProcessor);
                from(CXF_RS_ENDPOINT_URI4).process(testProcessor2);
                from(CXF_RS_ENDPOINT_URI5).process(testProcessor3);
                from(CXF_RS_ENDPOINT_URI6).log(LoggingLevel.OFF, "dummy");
            }
        };
    }
    // END SNIPPET: example

    /**
     * Issues a GET against {@code uri} and asserts a 200 response whose body
     * equals {@code expect}.
     */
    private void invokeGetCustomer(String uri, String expect) throws Exception {
        HttpGet get = new HttpGet(uri);
        get.addHeader("Accept", "application/json");
        // try-with-resources guarantees the client is closed even when an
        // assertion above fails (the old try/finally leaked nothing, but this
        // is the idiomatic and shorter form)
        try (CloseableHttpClient httpclient = HttpClientBuilder.create().build()) {
            HttpResponse response = httpclient.execute(get);
            assertEquals(200, response.getStatusLine().getStatusCode());
            assertEquals(expect, EntityUtils.toString(response.getEntity()));
        }
    }

    @Test
    public void testGetCustomerInterface() throws Exception {
        doTestGetCustomer("rest");
    }

    @Test
    public void testGetCustomerImpl() throws Exception {
        doTestGetCustomer("rest2");
    }

    @Test
    public void testGetCustomerInterfaceAndModel() throws Exception {
        doTestGetCustomer("rest3");
    }

    @Test
    public void testGetCustomerDefaultHandlerAndModel() throws Exception {
        doTestGetCustomer("rest4");
    }

    @Test
    public void testEchoCustomerDefaultHandlerAndModel() throws Exception {
        WebTarget target = ClientBuilder.newClient()
                .target("http://localhost:" + CXT + "/" + "rest4" + "/customerservice/customers");
        Customer c = target.request(MediaType.APPLICATION_JSON)
                .post(Entity.json(new Customer(333, "Barry")), Customer.class);
        assertEquals(333L, c.getId());
        assertEquals("Barry", c.getName());
    }

    @Test
    public void testGetCustomerDefaultHandlerAndModelAndContexts() throws Exception {
        doTestGetCustomer("rest5");
    }

    /**
     * Shared scenario: three customer IDs that the processors answer with a
     * JSON body, a plain-text Response, and the remote address respectively.
     */
    private void doTestGetCustomer(String contextUri) throws Exception {
        invokeGetCustomer("http://localhost:" + CXT + "/" + contextUri + "/customerservice/customers/126",
                "{\"Customer\":{\"id\":126,\"name\":\"Willem\"}}");
        invokeGetCustomer("http://localhost:" + CXT + "/" + contextUri + "/customerservice/customers/123",
                "customer response back!");
        invokeGetCustomer("http://localhost:" + CXT + "/" + contextUri + "/customerservice/customers/400",
                "The remoteAddress is 127.0.0.1");
    }

    @Test
    public void testGetCustomerImplCustomLifecycle() throws Exception {
        invokeGetCustomer("http://localhost:" + CXT + "/rest6/customerservice/customers/123",
                "{\"Customer\":{\"id\":123,\"name\":\"John\"}}");
    }

    @Test
    public void testGetWrongCustomer() throws Exception {
        URL url;

        // 789: processor sets a 404 response code by header -> IOException
        url = new URL("http://localhost:" + CXT + "/rest/customerservice/customers/789");
        try {
            url.openStream();
            fail("Expect to get exception here");
        } catch (IOException exception) {
            // expect the Internal error exception
        }

        // 456: processor throws WebApplicationException(404)
        url = new URL("http://localhost:" + CXT + "/rest/customerservice/customers/456");
        try {
            url.openStream();
            fail("Expect to get exception here");
        } catch (FileNotFoundException exception) {
            // do nothing here
        }

        // 234: processor returns a 404 Response as the body
        url = new URL("http://localhost:" + CXT + "/rest/customerservice/customers/234");
        try {
            url.openStream();
            fail("Expect to get exception here");
        } catch (FileNotFoundException exception) {
            // do nothing here
        }

        // 256: unmatched id -> processor throws RuntimeCamelException
        url = new URL("http://localhost:" + CXT + "/rest/customerservice/customers/256");
        try {
            url.openStream();
            fail("Expect to get exception here");
        } catch (IOException exception) {
            // expect the Internal error exception
        }
    }

    @Test
    public void testPutConsumer() throws Exception {
        HttpPut put = new HttpPut("http://localhost:" + CXT + "/rest/customerservice/customers");
        StringEntity entity = new StringEntity(PUT_REQUEST, "ISO-8859-1");
        entity.setContentType("text/xml; charset=ISO-8859-1");
        put.addHeader("test", "header1;header2");
        put.setEntity(entity);
        // try-with-resources instead of manual try/finally close
        try (CloseableHttpClient httpclient = HttpClientBuilder.create().build()) {
            HttpResponse response = httpclient.execute(put);
            assertEquals(200, response.getStatusLine().getStatusCode());
            assertEquals("", EntityUtils.toString(response.getEntity()));
        }
    }

    /**
     * Base processor implementing the GET-customer behavior shared by the
     * concrete processors below; responses vary by request path.
     */
    private abstract static class AbstractTestProcessor implements Processor {
        public void processGetCustomer(Exchange exchange) throws Exception {
            Message inMessage = exchange.getIn();
            String httpMethod = inMessage.getHeader(Exchange.HTTP_METHOD, String.class);
            assertEquals("GET", httpMethod, "Get a wrong http method");
            String path = inMessage.getHeader(Exchange.HTTP_PATH, String.class);
            // The parameter of the invocation is stored in the body of in message
            String id = inMessage.getBody(String.class);
            if ("/customerservice/customers/126".equals(path)) {
                Customer customer = new Customer();
                customer.setId(Long.parseLong(id));
                customer.setName("Willem");
                // We just put the response Object into the out message body
                exchange.getOut().setBody(customer);
            } else {
                if ("/customerservice/customers/400".equals(path)) {
                    // We return the remote client IP address this time
                    org.apache.cxf.message.Message cxfMessage
                            = inMessage.getHeader(CxfConstants.CAMEL_CXF_MESSAGE, org.apache.cxf.message.Message.class);
                    ServletRequest request = (ServletRequest) cxfMessage.get("HTTP.REQUEST");
                    // Just make sure the request object is not null
                    assertNotNull(request, "The request object should not be null");
                    Response r = Response.status(200).entity("The remoteAddress is 127.0.0.1").build();
                    exchange.getOut().setBody(r);
                    return;
                }
                if ("/customerservice/customers/123".equals(path)) {
                    // send a customer response back
                    Response r = Response.status(200).entity("customer response back!").build();
                    exchange.getOut().setBody(r);
                    return;
                }
                if ("/customerservice/customers/456".equals(path)) {
                    Response r = Response.status(404).entity("Can't found the customer with uri " + path)
                            .header("Content-Type", "text/plain").build();
                    throw new WebApplicationException(r);
                } else if ("/customerservice/customers/234".equals(path)) {
                    Response r = Response.status(404).entity("Can't found the customer with uri " + path)
                            .header("Content-Type", "text/plain").build();
                    exchange.getOut().setBody(r);
                } else if ("/customerservice/customers/789".equals(path)) {
                    exchange.getOut().setBody("Can't found the customer with uri " + path);
                    exchange.getOut().setHeader(Exchange.CONTENT_TYPE, "text/plain");
                    exchange.getOut().setHeader(Exchange.HTTP_RESPONSE_CODE, "404");
                } else {
                    throw new RuntimeCamelException("Can't found the customer with uri " + path);
                }
            }
        }
    }

    /** Dispatches on the CXF operation name (annotated resource classes). */
    private static class TestProcessor extends AbstractTestProcessor {
        @Override
        public void process(Exchange exchange) throws Exception {
            Message inMessage = exchange.getIn();
            // Get the operation name from in message
            String operationName = inMessage.getHeader(CxfConstants.OPERATION_NAME, String.class);
            if ("getCustomer".equals(operationName)) {
                processGetCustomer(exchange);
            } else if ("updateCustomer".equals(operationName)) {
                assertEquals("header1;header2", inMessage.getHeader("test"), "Get a wrong customer message header");
                String httpMethod = inMessage.getHeader(Exchange.HTTP_METHOD, String.class);
                assertEquals("PUT", httpMethod, "Get a wrong http method");
                Customer customer = inMessage.getBody(Customer.class);
                assertNotNull(customer, "The customer should not be null.");
                // Now you can do what you want on the customer object
                assertEquals("Mary", customer.getName(), "Get a wrong customer name.");
                // set the response back
                exchange.getOut().setBody(Response.ok().build());
            }
        }
    }

    /** Dispatches on HTTP path/method (default-handler model, no annotations). */
    private static class TestProcessor2 extends AbstractTestProcessor {
        @Override
        public void process(Exchange exchange) throws Exception {
            Message inMessage = exchange.getIn();
            // Get the operation name from in message
            String path = inMessage.getHeader(Exchange.HTTP_PATH, String.class);
            if (path.startsWith("/customerservice/customers")) {
                String httpMethod = inMessage.getHeader(Exchange.HTTP_METHOD, String.class);
                if (HttpMethod.GET.equals(httpMethod)) {
                    processGetCustomer(exchange);
                } else if (HttpMethod.POST.equals(httpMethod)) {
                    InputStream inBody = exchange.getIn().getBody(InputStream.class);
                    exchange.getOut().setBody(Response.ok(inBody).build());
                }
            }
        }
    }

    /** Reads the JAX-RS contexts (UriInfo/Request) propagated as properties. */
    private static class TestProcessor3 extends AbstractTestProcessor {
        @Override
        public void process(Exchange exchange) throws Exception {
            UriInfo ui = exchange.getProperty(UriInfo.class.getName(), UriInfo.class);
            String path = ui.getPath();

            Request req = exchange.getProperty(Request.class.getName(), Request.class);
            String httpMethod = req.getMethod();

            if (path.startsWith("customerservice/customers") && HttpMethod.GET.equals(httpMethod)) {
                processGetCustomer(exchange);
            }
        }
    }
}
package pb.proto;

import java.awt.geom.Point2D;

import physics.Circle;
import physics.Vect;

/**
 * Sent when a ball is teleported across portals.
 */
public class PortalBallMessage extends Message {
    /** The name of the board that the ball comes from. */
    private final String fromBoard;
    /**
     * The name of the portal that the ball comes from.
     *
     * If the ball arrives to a board that does not contain the target portal,
     * it is sent back to the source board and portal. If the network is in a
     * good mood, it is as if the ball never left the board.
     */
    private final String fromPortal;
    /** The name of the board that the ball should be teleported to. */
    private final String toBoard;
    /** The name of the portal that the ball should be teleported to. */
    private final String toPortal;
    /** The ball's name. */
    private final String ballName;
    /** The center and radius of the ball that is teleported. */
    private final Circle shape;
    /** The velocity of the ball that is teleported. */
    private final Vect velocity;

    // Rep invariant:
    //   everything is non-null
    //   all strings are non-empty
    // Thread safety:
    //   all fields are immutable, just like for Message

    /**
     * Creates a message for teleporting a ball across portals.
     *
     * @param fromBoard the name of the board that the ball comes from
     * @param fromPortal the name of the portal that the ball comes from
     * @param toBoard the name of the board that the ball should be teleported
     *   to
     * @param toPortal the name of the portal that the ball should be teleported
     *   to
     * @param ballName the ball's name
     * @param shape the ball's center and radius
     * @param velocity the ball's velocity
     */
    public PortalBallMessage(String fromBoard, String fromPortal,
            String toBoard, String toPortal, String ballName, Circle shape,
            Vect velocity) {
        assert fromBoard != null;
        assert fromBoard.length() != 0;
        assert fromPortal != null;
        assert fromPortal.length() != 0;
        assert toBoard != null;
        assert toBoard.length() != 0;
        assert toPortal != null;
        assert toPortal.length() != 0;
        assert ballName != null;
        assert ballName.length() != 0;
        assert shape != null;
        assert velocity != null;
        this.fromBoard = fromBoard;
        this.fromPortal = fromPortal;
        this.toBoard = toBoard;
        this.toPortal = toPortal;
        this.ballName = ballName;
        this.shape = shape;
        this.velocity = velocity;
    }

    public String getFromPortal() {
        return fromPortal;
    }

    public String getFromBoard() {
        return fromBoard;
    }

    public String getToPortal() {
        return toPortal;
    }

    public String getToBoard() {
        return toBoard;
    }

    public String getBallName() {
        return ballName;
    }

    public Circle getShape() {
        return shape;
    }

    public Vect getVelocity() {
        return velocity;
    }

    @Override
    protected String name() {
        return NAME;
    }

    @Override
    public String toLine() {
        return NAME + " " + fromBoard + " " + fromPortal + " " + toBoard
                + " " + toPortal + " " + ballName + " "
                + shape.getCenter().x() + " " + shape.getCenter().y() + " "
                + shape.getRadius() + " " + velocity.x() + " " + velocity.y();
    }

    // NOTE: The stuff below is package-private on purpose.

    /** This message's name. */
    static final String NAME = "portalball";

    /**
     * Creates a message from a line of text received from a socket.
     *
     * @param tokens strings that were separated by spaces on the line
     */
    PortalBallMessage(String[] tokens) {
        assert tokens[0].equals(NAME);
        // BUGFIX: tokens[1]..tokens[10] are read below, so 11 tokens are
        // required; the old assertion only demanded 8
        assert tokens.length >= 11;
        try {
            this.fromBoard = tokens[1];
            this.fromPortal = tokens[2];
            this.toBoard = tokens[3];
            this.toPortal = tokens[4];
            this.ballName = tokens[5];
            double cx = Double.parseDouble(tokens[6]);
            double cy = Double.parseDouble(tokens[7]);
            double radius = Double.parseDouble(tokens[8]);
            double vx = Double.parseDouble(tokens[9]);
            double vy = Double.parseDouble(tokens[10]);
            this.shape = new Circle(new Point2D.Double(cx, cy), radius);
            this.velocity = new Vect(vx, vy);
        } catch (NumberFormatException e) {
            // BUGFIX: the old message claimed "Invalid version number", but
            // the failing values are ball coordinates / velocity
            throw new IllegalArgumentException(
                    "Invalid number in portalball message", e);
        }
    }
}
package com.odia.alphabet.utils;

import android.animation.AnimatorSet;
import android.animation.ArgbEvaluator;
import android.animation.ObjectAnimator;
import android.animation.ValueAnimator;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Typeface;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.MotionEvent;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import android.widget.TextView;

import com.odia.alphabet.R;

/**
 * A circular button drawn on top of a {@link TextView}: a filled circle with an
 * optional border, an optional centered icon bitmap, a color animation between
 * a base color and a hover color on touch, and an optional ripple effect.
 *
 * Configured via the custom {@code MyCircleView} styleable attributes.
 *
 * Created by deeptiman on 8/12/2017.
 */
@SuppressLint("AppCompatCustomView")
public class RoundButton extends TextView {

    // Center of the view, captured on ACTION_DOWN for the ripple animation target
    float mCenterX;
    float mCenterY;
    private Bitmap b;
    // Current ripple origin; animated toward the view center by rippleAnimation()
    private RoundButton.Coord mCoord;
    // Current ripple radius; animated by the "radius" ObjectAnimator via setRadius()
    private float mRadius;
    // Radius of the main circle, recomputed on every onDraw from the view size
    private int radius = 0;
    public boolean ripleEffect;
    // Asset path of a typeface, from the cub_fontstyle attribute
    private String fontStyle = "";
    // Center of the main circle, recomputed on every onDraw
    private int circle_x, circle_y;
    // Maximum icon bitmap dimensions before the w/h offsets are applied
    private final int maxWidth = 80;
    private final int maxHeight = 80;
    // mPaint: ripple circle; mRectPaint: only its alpha is animated
    private Paint mPaint, mRectPaint;
    private final int BORDER_RADIUS = 6;
    // circlePaint: filled main circle; circleBorder: stroked border + shadow
    private Paint circlePaint, circleBorder;
    // startcolor/endcolor: endpoints of the touch color animation
    private int startcolor, endcolor, user_given_radius;
    private int circle_color, circle_hover_color, default_color, circle_border_color, circle_border_radius, cr_icon;
    // Debug counter logged from onDraw
    private int drawCount = 0;

    public RoundButton(Context context) {
        super(context);
        init(null);
    }

    public RoundButton(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(attrs);
    }

    public RoundButton(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(attrs);
    }

    /**
     * Reads the MyCircleView styleable attributes and initializes the paints,
     * colors, icon, ripple flag and typeface.
     */
    private void init(AttributeSet attrs) {
        mCoord = new RoundButton.Coord();
        mPaint = new Paint();
        mRectPaint = new Paint();
        circlePaint = new Paint();
        mPaint.setAntiAlias(true);
        circleBorder = new Paint();
        mRectPaint.setAntiAlias(true);
        circlePaint.setAntiAlias(true);
        circleBorder.setAntiAlias(true);
        TypedArray properties = getContext().obtainStyledAttributes(attrs, R.styleable.MyCircleView, 0, 0);
        if (properties != null) {
            try {
                setW(properties.getInt(R.styleable.MyCircleView_cub_w, 0));
                setH(properties.getInt(R.styleable.MyCircleView_cub_h, 0));
                setCircle_color(properties.getInt(R.styleable.MyCircleView_cub_color, Color.BLACK));
                setCircle_hover_color(properties.getInt(R.styleable.MyCircleView_cub_hover_color, Color.GRAY));
                setCircle_border_color(properties.getInt(R.styleable.MyCircleView_cub_border_color, Color.WHITE));
                // NOTE(review): user_given_radius is read from cub_border_radius but
                // never used; the border radius is hard-coded to 20 just below
                user_given_radius = properties.getDimensionPixelSize(R.styleable.MyCircleView_cub_border_radius, BORDER_RADIUS);
                setCircle_border_radius(20);
                int btnIcon = properties.getResourceId(R.styleable.MyCircleView_cub_icon, 0);
                setCr_icon(btnIcon);
                ripleEffect = properties.getBoolean(R.styleable.MyCircleView_cub_riple_effect, false);
                fontStyle = properties.getString(R.styleable.MyCircleView_cub_fontstyle);
                if (fontStyle != null) {
                    Typeface typeFace = Typeface.createFromAsset(getContext().getAssets(), fontStyle);
                    setTypeface(typeFace);
                }
                startcolor = getCircle_color();
                default_color = getCircle_color();
                endcolor = getCircle_hover_color();
                // Translucent white (alpha 0x0D) for the ripple overlay
                mPaint.setColor(Color.parseColor("#0DFFFFFF"));
                mRectPaint.setColor(Color.parseColor("#0DFFFFFF"));
            } catch (Exception e) {
                // NOTE(review): exceptions are deliberately swallowed so a bad
                // attribute set falls back to defaults; consider logging
            } finally {
                properties.recycle();
            }
        }
        // Icon buttons suppress their text; text buttons keep it
        if (getCr_icon() != null) {
            setText("");
        } else {
            setText(getText());
        }
    }

    /** Sets the text only when it actually changed, to avoid re-layout loops from onDraw. */
    void setTextIfChanged(TextView tv, CharSequence text) {
        if (!text.equals(tv.getText()))
            tv.setText(text);
    }

    /**
     * Draws the filled circle, the optional border with shadow, the optional
     * centered icon (or the text), and the ripple overlay when active.
     */
    @Override
    protected void onDraw(Canvas canvas) {
        drawCount++;
        Log.w(this.getClass().getName(), "onDraw of Balls called. Total draws:" + Integer.toString(drawCount));
        int half_width = this.getWidth() / 2;
        int half_height = this.getHeight() / 2;
        // First assignment is immediately overwritten by the if/else below
        radius = Math.min(half_width, half_height) / 4;
        if (half_width > half_height) {
            radius = half_height - 10;
        } else {
            radius = half_width - 10;
        }
        circle_x = half_width;
        circle_y = half_height;
        circlePaint.setStyle(Paint.Style.FILL);
        circlePaint.setColor(default_color);
        canvas.drawCircle(half_width, half_height, radius, circlePaint); //ORIGNAL CIRCLE
        if (getCircle_border_radius() != 0) {
            circleBorder.setStyle(Paint.Style.STROKE);
            circleBorder.setStrokeWidth(getCircle_border_radius());
            circleBorder.setColor(getCircle_border_color());
            // Hardware layer needed for the shadow layer below
            this.setLayerType(LAYER_TYPE_HARDWARE, circleBorder);
            circleBorder.setShadowLayer(5.0f, 0.0f, 3.0f, Color.GRAY);
            canvas.drawCircle(half_width, half_height, radius, circleBorder); //BORDER CIRCLE
        }
        if (getCr_icon() != null) {
            imageIcon(canvas, circlePaint, half_width, half_height);
            setTextIfChanged(this, "");
        } else {
            setTextIfChanged(this, getText().toString());
        }
        setGravity(Gravity.CENTER);
        if (ripleEffect) {
            // (0,0) means no touch yet -> no ripple circle to draw
            if (mCoord.x != 0 && mCoord.y != 0) {
                canvas.drawCircle(mCoord.x, mCoord.y, mRadius, mPaint);
            }
        }
        super.onDraw(canvas);
    }

    /**
     * Starts the ripple and color animations when the touch lands inside the
     * circle; reverts the color on UP/CANCEL. Always consumes the event.
     */
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (event.getActionMasked() == MotionEvent.ACTION_DOWN) {
            if (inCircle(event.getX(), event.getY(), circle_x, circle_y, radius)) {
                mCenterX = (getTranslationX() + getWidth()) / 2.0f;
                mCenterY = (getTranslationY() + getHeight()) / 2.0f;
                mCoord.setX(event.getX());
                mCoord.setY(event.getY());
                if (ripleEffect == true) {
                    rippleAnimation();
                }
            }
        }
        super.onTouchEvent(event);
        int action = event.getAction();
        switch (action) {
            case MotionEvent.ACTION_UP:
                // NOTE(review): both branches are identical; the in/out-of-circle
                // distinction has no effect here
                if (inCircle(event.getX(), event.getY(), circle_x, circle_y, radius)) {
                    setColorAnimation(endcolor, startcolor);
                } else {
                    setColorAnimation(endcolor, startcolor);
                }
                break;
            case MotionEvent.ACTION_DOWN:
                if (inCircle(event.getX(), event.getY(), circle_x, circle_y, radius)) {
                    setColorAnimation(startcolor, endcolor);
                }
                break;
            case MotionEvent.ACTION_CANCEL:
                Log.e("called", "cancel else");
                default_color = getCircle_color();
                setColorAnimation(endcolor, startcolor);
                break;
        }
        return true;
    }

    /** Returns true when (x,y) lies strictly inside the given circle. */
    private boolean inCircle(float x, float y, float circleCenterX, float circleCenterY, float circleRadius) {
        double dx = Math.pow(x - circleCenterX, 2);
        double dy = Math.pow(y - circleCenterY, 2);
        if ((dx + dy) < Math.pow(circleRadius, 2)) {
            return true;
        } else {
            return false;
        }
    }

    /** Draws the (scaled) icon bitmap centered on (p1, p2). The paint p is unused. */
    public void imageIcon(final Canvas canvas, final Paint p, final int p1, final int p2) {
        if (getCr_icon() != null) {
            Bitmap b2 = scaleBitmap(getCr_icon());
            canvas.drawBitmap(b2, p1 - b2.getWidth() * 0.5f, p2 - b2.getHeight() * 0.5f, null);
        }
    }

    /**
     * Scales the bitmap to fit within maxWidth x maxHeight, preserving aspect
     * ratio, then adds the w/h attribute offsets to the final dimensions.
     */
    private Bitmap scaleBitmap(Bitmap bm) {
        if (bm != null) {
            int width = bm.getWidth();
            int height = bm.getHeight();
            if (width > height) {
                float ratio = (float) width / maxWidth;
                width = maxWidth;
                height = (int) (height / ratio);
            } else if (height > width) {
                float ratio = (float) height / maxHeight;
                height = maxHeight;
                width = (int) (width / ratio);
            } else {
                height = maxHeight;
                width = maxWidth;
            }
            // NOTE(review): w/h come from the cub_w/cub_h attributes and are added
            // as extra pixels on top of the scaled size — confirm intended
            bm = Bitmap.createScaledBitmap(bm, width + w, height + h, true);
        }
        return bm;
    }

    /** Animates default_color from start to end over 500ms, invalidating each frame. */
    public void setColorAnimation(int start, int end) {
        ValueAnimator animator = ValueAnimator.ofObject(new ArgbEvaluator(), start, end);
        animator.setDuration(500);
        animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                default_color = (int) animation.getAnimatedValue();
                invalidate();
            }
        });
        animator.setInterpolator(new AccelerateDecelerateInterpolator());
        animator.start();
    }

    /**
     * Plays the ripple: grows the ripple radius, fades the ripple paint, and
     * moves the ripple origin toward the view center, all together over 500ms.
     */
    public void rippleAnimation() {
        Interpolator interpolator = new LinearInterpolator();
        long duration = 500;
        // "radius" targets this.setRadius(float)
        ObjectAnimator animRadius = ObjectAnimator.ofFloat(this, "radius", 10f, getWidth() / 3f);
        animRadius.setInterpolator(interpolator);
        animRadius.setDuration(duration);
        ObjectAnimator animAlpha = ObjectAnimator.ofInt(mPaint, "alpha", 200, 0);
        animAlpha.setInterpolator(interpolator);
        animAlpha.setDuration(duration);
        // "x"/"y" target Coord.setX/setY
        ObjectAnimator animX = ObjectAnimator.ofFloat(mCoord, "x", mCoord.x, mCenterX);
        animX.setInterpolator(interpolator);
        animX.setDuration(duration);
        ObjectAnimator animY = ObjectAnimator.ofFloat(mCoord, "y", mCoord.y, mCenterY);
        animY.setInterpolator(interpolator);
        animY.setDuration(duration);
        ObjectAnimator animRectAlpha = ObjectAnimator.ofInt(mRectPaint, "alpha", 0, 100, 0);
        animRectAlpha.setInterpolator(interpolator);
        animRectAlpha.setDuration(duration);
        AnimatorSet animSetAlphaRadius = new AnimatorSet();
        animSetAlphaRadius.playTogether(animRadius, animAlpha, animX, animY, animRectAlpha);
        animSetAlphaRadius.start();
    }

    // GETTER SETTER----------------------------

    public int getCircle_color() {
        return circle_color;
    }

    // Extra pixel offsets applied to the scaled icon size (from cub_w/cub_h)
    int w, h;

    public void setW(int w) {
        this.w = w;
    }

    public void setH(int h) {
        this.h = h;
    }

    public void setCircle_color(int circle_color) {
        this.circle_color = circle_color;
    }

    public int getCircle_hover_color() {
        return circle_hover_color;
    }

    public void setCircle_hover_color(int circle_hover_color) {
        this.circle_hover_color = circle_hover_color;
    }

    public int getCircle_border_color() {
        return circle_border_color;
    }

    public void setCircle_border_color(int circle_border_color) {
        this.circle_border_color = circle_border_color;
    }

    public int getCircle_border_radius() {
        return circle_border_radius;
    }

    public void setCircle_border_radius(int circle_border_radius) {
        this.circle_border_radius = circle_border_radius;
    }

    /**
     * Returns the icon decoded as a Bitmap, or null when no icon resource is set.
     * NOTE(review): decodes the resource on every call (including every onDraw) —
     * consider caching.
     */
    public Bitmap getCr_icon() {
        return convertToBitMap(cr_icon);
    }

    public void setCr_icon(int cr_icon) {
        this.cr_icon = cr_icon;
    }

    /** Decodes a drawable resource id to a Bitmap (null for invalid ids). */
    private Bitmap convertToBitMap(int id) {
        Bitmap icon = BitmapFactory.decodeResource(getResources(), id);
        return icon;
    }

    /** Ripple-radius setter invoked reflectively by the "radius" ObjectAnimator. */
    public void setRadius(final float radius) {
        mRadius = radius;
    }

    // GETTER SETTER----------------------------

    /** Mutable 2D point used as the ripple origin; animated via setX/setY. */
    private class Coord {
        public float x = 0;
        public float y = 0;

        public Coord() {
        }

        public Coord(float xValue, float yValue) {
            this.x = xValue;
            this.y = yValue;
        }

        private void setX(float value) {
            this.x = value;
        }

        private void setY(float value) {
            this.y = value;
        }
    }
}
import java.io.*;
import java.net.*;
import java.security.*;
import java.security.cert.CertificateException;
import java.util.Arrays;
import javax.net.ssl.*;

/**
 * An SSL echo-style server: loads an X.509 key store, enables anonymous
 * cipher suites in addition to the defaults, and copies whatever each
 * client sends to standard output.
 */
public class SecureOrderTaker {

    public final static int PORT = 7000;
    public final static String algorithm = "SSL";

    public static void main(String[] args) {
        try {
            SSLContext context = SSLContext.getInstance(algorithm);

            // The reference implementation only supports X.509 keys
            KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");

            // Oracle's default kind of key store
            KeyStore ks = KeyStore.getInstance("JKS");

            // For security, every key store is encrypted with a
            // passphrase that must be provided before we can load
            // it from disk. The passphrase is stored as a char[] array
            // so it can be wiped from memory quickly rather than
            // waiting for a garbage collector.
            char[] password = System.console().readPassword();
            // BUGFIX: the FileInputStream was previously never closed;
            // try-with-resources releases it even if load() throws
            try (InputStream keyStoreIn = new FileInputStream("jnp4e.keys")) {
                ks.load(keyStoreIn, password);
            }
            kmf.init(ks, password);
            context.init(kmf.getKeyManagers(), null, null);

            // wipe the password
            Arrays.fill(password, '0');

            SSLServerSocketFactory factory = context.getServerSocketFactory();
            SSLServerSocket server = (SSLServerSocket) factory.createServerSocket(PORT);

            // add anonymous (non-authenticated) cipher suites
            String[] supported = server.getSupportedCipherSuites();
            String[] anonCipherSuitesSupported = new String[supported.length];
            int numAnonCipherSuitesSupported = 0;
            for (int i = 0; i < supported.length; i++) {
                if (supported[i].indexOf("_anon_") > 0) {
                    anonCipherSuitesSupported[numAnonCipherSuitesSupported++] = supported[i];
                }
            }

            String[] oldEnabled = server.getEnabledCipherSuites();
            String[] newEnabled = new String[oldEnabled.length + numAnonCipherSuitesSupported];
            System.arraycopy(oldEnabled, 0, newEnabled, 0, oldEnabled.length);
            System.arraycopy(anonCipherSuitesSupported, 0, newEnabled,
                    oldEnabled.length, numAnonCipherSuitesSupported);
            server.setEnabledCipherSuites(newEnabled);

            // Now all the set up is complete and we can focus
            // on the actual communication.
            while (true) {
                // This socket will be secure,
                // but there's no indication of that in the code!
                try (Socket theConnection = server.accept()) {
                    InputStream in = theConnection.getInputStream();
                    int c;
                    while ((c = in.read()) != -1) {
                        System.out.write(c);
                    }
                } catch (IOException ex) {
                    // keep serving the next client; one bad connection
                    // should not bring the server down
                    ex.printStackTrace();
                }
            }
        } catch (IOException | KeyManagementException | KeyStoreException
                | NoSuchAlgorithmException | CertificateException
                | UnrecoverableKeyException ex) {
            ex.printStackTrace();
        }
    }
}
/*
 * Copyright 2017-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *     http://aws.amazon.com/apache2.0/
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package com.amazon.aace.core;

import android.content.Context;
import android.util.Log;

import com.amazon.aace.core.config.EngineConfiguration;

import org.json.JSONArray;
import org.json.JSONObject;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

/**
 * The Engine must be instantiated by the platform implementation. Along with the platform interfaces,
 * it is responsible for managing the interactions between the platform and AVS.
 * The platform implementation should *not* extend Engine; it is extended in the SDK.
 */
final public class Engine extends NativeRef {
    private static final String TAG = "CoreEngine";

    // Lazily-resolved broker, cached after the first getMessageBroker() call
    private MessageBroker mMessageBroker = null;

    private Engine() {}

    /**
     * Creates a new instance of an Engine object.
     */
    static public Engine create(Context context) {
        loadModuleLibraries(context);
        return new Engine();
    }

    /**
     * Sets the Engine configuration to a set of configuration objects
     *
     * @param configurationList A collection of @c com.amazon.aace.core.EngineConfiguration objects as an
     *        @c EngineConfiguration[]
     * @return @c true if the Engine configuration was successful, else @c false
     */
    public final boolean configure(EngineConfiguration[] configurationList) {
        long[] configurationRefList = new long[configurationList.length];
        for (int j = 0; j < configurationList.length; j++) {
            configurationRefList[j] = configurationList[j].getNativeRef();
        }
        return configure(getNativeRef(), configurationRefList);
    }

    /**
     * Starts the Engine and attempts to establish a connection to AVS
     *
     * @return @c true if the Engine was started, else @c false
     *
     * @sa stop()
     */
    public final boolean start() {
        return start(getNativeRef());
    }

    /**
     * Stops the Engine and shuts down the connection to AVS
     *
     * @return @c true if the Engine was stopped, else @c false
     *
     * @sa start()
     */
    public final boolean stop() {
        return stop(getNativeRef());
    }

    /**
     * Registers a @c PlatformInterface instance with the Engine
     * The platform implementation must register each interface required by the application.
     *
     * @param platformInterface The @c PlatformInterface instance to register
     *
     * @return @c true if the @c PlatformInterface instance was registered, else @c false
     *
     * @sa com.amazon.aace.core.PlatformInterface
     */
    public final boolean registerPlatformInterface(PlatformInterface platformInterface) {
        return registerPlatformInterface(getNativeRef(), platformInterface.getNativeRef());
    }

    /**
     * Returns the @c MessageBroker instance for the Engine
     *
     * @return @c MessageBroker
     *
     * @sa com.amazon.aace.core.MessageBroker
     */
    public final MessageBroker getMessageBroker() {
        if (mMessageBroker == null) {
            mMessageBroker = getMessageBroker(getNativeRef());
        }
        return mMessageBroker;
    }

    /**
     * Loads every native library listed in the JSON manifests under the
     * "meta-aac" asset folder. Each manifest is expected to contain a
     * "libraries" array of objects with a "name" field.
     *
     * Fixes over the previous version: the asset stream is closed via
     * try-with-resources even when parsing throws, the stream is read to
     * completion instead of relying on a single read() of available() bytes
     * (read() may return fewer bytes than requested), and errors are logged
     * with the full stack trace.
     */
    private static void loadModuleLibraries(Context context) {
        try {
            String folderName = "meta-aac";
            String libraryKey = "libraries";
            String category = "name";
            String[] fileList = context.getAssets().list(folderName);
            for (String s : fileList) {
                String json;
                try (InputStream is = context.getAssets().open(folderName + "/" + s)) {
                    json = readFully(is);
                }
                JSONObject obj = new JSONObject(json);
                JSONArray jsonArray = obj.getJSONArray(libraryKey);
                for (int i = 0; i < jsonArray.length(); i++) {
                    String libraryName = jsonArray.getJSONObject(i).getString(category);
                    System.loadLibrary(libraryName);
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "Failed to load module libraries", e);
        }
    }

    /** Reads the whole stream into a UTF-8 string (handles partial reads). */
    private static String readFully(InputStream is) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        byte[] buffer = new byte[4096];
        int n;
        while ((n = is.read(buffer)) != -1) {
            out.write(buffer, 0, n);
        }
        return out.toString("UTF-8");
    }

    public final boolean setNativeEnv(String name, String value) {
        return setNativeEnv(getNativeRef(), name, value);
    }

    protected long createNativeRef() {
        return createBinder();
    }

    protected void disposeNativeRef(long nativeRef) {
        disposeBinder(nativeRef);
    }

    // Native Engine JNI methods
    private native long createBinder();
    private native void disposeBinder(long nativeRef);
    private native boolean configure(long nativeRef, long[] configurationRefList);
    private native boolean start(long nativeRef);
    private native boolean stop(long nativeRef);
    private native boolean registerPlatformInterface(long nativeRef, long platformInterfaceRef);
    private native boolean setNativeEnv(long nativeRef, String name, String value);
    private native MessageBroker getMessageBroker(long nativeRef);
}
package lia.Monitor.Store.Fast;

import java.io.Serializable;
import java.util.logging.Level;
import java.util.logging.Logger;

import lia.Monitor.monitor.AppConfig;
import lia.Monitor.monitor.AppConfigChangeListener;
import lia.Monitor.monitor.ExtendedResult;
import lia.Monitor.monitor.Result;
import lia.util.ntp.NTPDate;

/**
 * Write-behind cache for a single monitored parameter.
 * <p>
 * Each instance accumulates incoming samples for one parameter of one {@link Result},
 * averages them over a fixed compacting interval ({@code lInterval}) and writes one
 * aggregated point (avg/min/max, timestamped at the middle of the interval) to the
 * database through a {@link WriterInterface} whenever an interval boundary is crossed.
 *
 * @author costing
 * @since forever
 */
public final class CacheElement implements Serializable {

    /**
     * This way we won't see any more errors in the logs :)
     */
    private static final long serialVersionUID = -4347639138018062968L;

    private static final Logger logger = Logger.getLogger(CacheElement.class.getName());

    /**
     * Average value
     */
    double dValue;

    /**
     * Previous value
     */
    double dPrevValue;

    /**
     * Min value on the current interval
     */
    double dMin;

    /**
     * Max value on the current interval
     */
    double dMax;

    /**
     * Timestamp of the last received value
     */
    long lLastUpdate;

    /**
     * Timestamp of the last write to the database
     */
    long lLastWritten;

    /**
     * One of the Result objects, kept for ID only
     */
    final Result r;

    /**
     * Parameter index from the Result above
     */
    final int iParam;

    /**
     * Compacting interval
     */
    long lInterval;

    /**
     * Flag to signal if we have some unwritten data
     */
    boolean bSomeData;

    /**
     * Flag to signal if the value was the only one received in this interval
     */
    boolean bWasFirst;

    /**
     * The object that does the actual write
     */
    transient WriterInterface writer;

    /**
     * Accept values that are at most this long in the future (in milliseconds). It is set by the configuration option:
     * <code>
     * lia.Monitor.Store.ALLOW_FUTURE=(interval, in seconds, defaults to 10 minutes)
     * </code>
     * You can set it to 0 to skip checking.
     */
    private static long ALLOW_FUTURE = 600000;

    /**
     * When the configuration file is touched, reload parameters
     */
    static final void reloadConfig() {
        // Config value is in seconds; ALLOW_FUTURE is kept in milliseconds.
        ALLOW_FUTURE = AppConfig.getl("lia.Monitor.Store.ALLOW_FUTURE", 600) * 1000;
    }

    // Load the configuration once at class-load time and re-read it whenever
    // AppConfig signals that the configuration file has changed.
    static {
        reloadConfig();

        AppConfig.addNotifier(new AppConfigChangeListener() {
            @Override
            public void notifyAppConfigChanged() {
                reloadConfig();
            }
        });
    }

    /**
     * The only constructor
     *
     * @param _lInterval compact interval
     * @param result template
     * @param _iParam parameter index in the Result above
     * @param lTime current timestamp
     * @param bWrite whether or not to write this first value
     * @param w actual writer
     */
    public CacheElement(final long _lInterval, final Result result, final int _iParam, final long lTime,
            final boolean bWrite, final WriterInterface w) {
        r = result;
        iParam = _iParam;
        lInterval = _lInterval;
        // Start one full interval in the past so the first sample at lTime is
        // treated as the start of a fresh interval by update().
        lLastWritten = lTime - lInterval;
        lLastUpdate = lTime - lInterval;
        dValue = r.param[iParam];
        bSomeData = false;
        writer = w;

        update(result, bWrite);
    }

    /**
     * Used after a de-serialization to set the writer to a real one
     *
     * @param w
     */
    public synchronized void setWriter(final WriterInterface w) {
        writer = w;
    }

    /**
     * Add a new value
     * <p>
     * Locates the parameter in {@code _r} that matches this cache's parameter name
     * (first by the same index, then by scanning all names) and forwards its value
     * to {@link #update(double, double, double, long, boolean)}. For an
     * {@link ExtendedResult} the carried min/max are used; for a plain Result the
     * single value stands in for avg, min and max.
     *
     * @param _r
     * @param bWrite
     * @return true if the value was taken into account, false if it was ignored
     */
    public boolean update(final Result _r, final boolean bWrite) {
        if (_r instanceof ExtendedResult) {
            final ExtendedResult er = (ExtendedResult) _r;

            if (er.param == null) {
                return false;
            }

            // A single-parameter result is assumed to be ours — no name check.
            if (er.param.length == 1) {
                return update(er.param[0], er.min, er.max, er.time, bWrite);
            }

            // Fast path: same index carries the same parameter name.
            if ((iParam < er.param_name.length) && er.param_name[iParam].equals(r.param_name[iParam])) {
                return update(er.param[iParam], er.min, er.max, er.time, bWrite);
            }

            // Slow path: the parameter moved to a different index.
            for (int i = 0; i < er.param_name.length; i++) {
                if ((i != iParam) && er.param_name[i].equals(r.param_name[iParam])) {
                    return update(er.param[i], er.min, er.max, _r.time, bWrite);
                }
            }

            return false;
        }

        // Plain Result: value is used for avg, min and max alike.
        if ((iParam < _r.param_name.length) && _r.param_name[iParam].equals(r.param_name[iParam])) {
            return update(_r.param[iParam], _r.param[iParam], _r.param[iParam], _r.time, bWrite);
        }

        for (int i = 0; i < _r.param_name.length; i++) {
            if ((i != iParam) && _r.param_name[i].equals(r.param_name[iParam])) {
                return update(_r.param[i], _r.param[i], _r.param[i], _r.time, bWrite);
            }
        }

        return false;
    }

    /**
     * Write the last unwritten value to the database.
     * <p>
     * A value that was the only one received in its interval and was already written
     * on arrival ({@code bWasFirst}) is not written again.
     *
     * @return true if there was something left to write and the write was a success
     */
    public synchronized boolean flush() {
        if (bSomeData) {
            bSomeData = false;

            if (!bWasFirst) {
                lLastWritten += lInterval;
                lLastUpdate = lLastWritten;

                // Points are timestamped at the middle of the compacting interval.
                return writer.insert(lLastWritten - (lInterval / 2), r, iParam, dValue, dMin, dMax);
            }
        }

        return false;
    }

    /**
     * Check whether or not the unwritten value should be flushed
     * <p>
     * Flushes when the pending data is more than two intervals older than the
     * incoming timestamp.
     *
     * @param lUpdateTime
     * @return true if the value was flushed, false if not
     */
    public synchronized boolean checkFlush(final long lUpdateTime) {
        if (bSomeData && ((lUpdateTime - lLastWritten) > (2 * lInterval))) {
            return flush();
        }

        return false;
    }

    /**
     * Add a new value
     *
     * @param dNewValue the new average value
     * @param dNewMin the new min value
     * @param dNewMax the new max value
     * @param lUpdateTime timestamp of this value
     * @param bWrite true = write, false = do not write this value (was previously read from the db)
     * @return true if the value was taken into account, false if it was ignored
     */
    public synchronized boolean update(final double dNewValue, final double dNewMin, final double dNewMax,
            final long lUpdateTime, final boolean bWrite) {
        // Ignore stale or duplicate timestamps.
        if (lUpdateTime <= lLastUpdate) {
            //debug("Returning false because update time < last update: "+lUpdateTime+", "+lLastUpdate);
            return false;
        }

        if (lLastUpdate < lLastWritten) {
            //debug("marking some data = false because last update < last written : "+lLastUpdate+", "+lLastWritten);
            bSomeData = false;
        }

        // Reject values too far in the future (ALLOW_FUTURE ms, default 10 minutes;
        // 0 disables the check).
        if ((ALLOW_FUTURE > 0) && (lUpdateTime > (NTPDate.currentTimeMillis() + ALLOW_FUTURE))) {
            logger.log(
                    Level.FINE,
                    "update: return FALSE because lUpdateTime > now()" + r + "\n" + "lUpdateTime = " + lUpdateTime
                            + " : " + (new java.util.Date(lUpdateTime)) + "\n" + "now = " + NTPDate.currentTimeMillis()
                            + " : " + (new NTPDate()));

            //debug("Returning false because update time is in the future: "+lUpdateTime);
            return false;
        }

        // Push out data that is too old before accepting the new sample.
        checkFlush(lUpdateTime);

        if (!bSomeData) {
            // First sample of a fresh interval: it defines the running state.
            dMin = dNewMin;
            dMax = dNewMax;
            dPrevValue = dNewValue;
            bWasFirst = bWrite;

            if ((lUpdateTime - lLastWritten) >= lInterval) {
                // Advance lLastWritten to the interval boundary containing lUpdateTime.
                while ((lUpdateTime - lLastWritten) >= lInterval) {
                    lLastWritten += lInterval;
                }

                if (bWrite) {
                    //debug("direct write because some data = false and write = true: "+lLastWritten);
                    writer.insert(lLastWritten - (lInterval / 2), r, iParam, dNewValue, dNewMin, dNewMax);
                }
            }

            lLastUpdate = lUpdateTime;
            dValue = dNewValue;
            bSomeData = true;

            return true;
        }

        bWasFirst = false;

        if ((lUpdateTime - lLastWritten) >= lInterval) {
            // we should write a new value to the database

            // Linear interpolation of the value at the interval boundary
            // (lLastWritten + lInterval), between the previous and the new sample.
            final double dIntersect = dPrevValue
                    + (((dNewValue - dPrevValue) * ((lLastWritten + lInterval) - lLastUpdate)) / (lUpdateTime - lLastUpdate));

            // Time-weighted average over the closing interval: the accumulated
            // average up to lLastUpdate plus the trapezoid from lLastUpdate to
            // the boundary.
            final double avg = ((dValue * (lLastUpdate - lLastWritten)) + (((dPrevValue + dIntersect) / 2) * ((lLastWritten + lInterval) - lLastUpdate)))
                    / lInterval;

            dMin = Math.min(dMin, dIntersect);
            dMax = Math.max(dMax, dIntersect);

            lLastWritten += lInterval;

            //debug("writing normal averaged data : lLastWritten = "+lLastWritten);

            writer.insert(lLastWritten - (lInterval / 2), r, iParam, avg, dMin, dMax);

            // Seed the new interval with the boundary value and the new sample.
            dValue = (dIntersect + dNewValue) / 2;
            dPrevValue = dNewValue;
            dMin = Math.min(dIntersect, dNewMin);
            dMax = Math.max(dIntersect, dNewMax);
            lLastUpdate = lUpdateTime;
        } else {
            //debug("just averaging the data");

            double avg;

            if (lUpdateTime > lLastWritten) {
                // Extend the time-weighted average with the trapezoid between the
                // previous and the new sample.
                avg = ((dValue * (lLastUpdate - lLastWritten)) + (((dNewValue + dPrevValue) / 2) * (lUpdateTime - lLastUpdate)))
                        / (lUpdateTime - lLastWritten);
            } else {
                // Degenerate case (no elapsed span to weight by): plain average.
                avg = (dValue + dNewValue) / 2;
            }

            dValue = avg;
            dPrevValue = dNewValue;
            dMax = Math.max(dMax, dNewMax);
            dMin = Math.min(dMin, dNewMin);
            lLastUpdate = lUpdateTime;
        }

        return true;
    }
}
/* * Copyright 2013-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.elasticmapreduce; import javax.annotation.Generated; import com.amazonaws.*; import com.amazonaws.regions.*; import com.amazonaws.services.elasticmapreduce.model.*; import com.amazonaws.services.elasticmapreduce.waiters.AmazonElasticMapReduceWaiters; /** * Interface for accessing Amazon EMR. * <p> * <b>Note:</b> Do not directly implement this interface, new methods are added to it regularly. Extend from * {@link com.amazonaws.services.elasticmapreduce.AbstractAmazonElasticMapReduce} instead. * </p> * <p> * <p> * Amazon EMR is a web service that makes it easy to process large amounts of data efficiently. Amazon EMR uses Hadoop * processing combined with several AWS products to do tasks such as web indexing, data mining, log file analysis, * machine learning, scientific simulation, and data warehousing. * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public interface AmazonElasticMapReduce { /** * The region metadata service name for computing region endpoints. You can use this value to retrieve metadata * (such as supported regions) of the service. * * @see RegionUtils#getRegionsForService(String) */ String ENDPOINT_PREFIX = "elasticmapreduce"; /** * Overrides the default endpoint for this client ("https://elasticmapreduce.amazonaws.com"). Callers can use this * method to control which AWS region they want to work with. 
* <p> * Callers can pass in just the endpoint (ex: "elasticmapreduce.amazonaws.com") or a full URL, including the * protocol (ex: "https://elasticmapreduce.amazonaws.com"). If the protocol is not specified here, the default * protocol from this client's {@link ClientConfiguration} will be used, which by default is HTTPS. * <p> * For more information on using AWS regions with the AWS SDK for Java, and a complete list of all available * endpoints for all AWS services, see: <a href= * "https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-region-selection.html#region-selection-choose-endpoint" * > https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-region-selection.html#region-selection- * choose-endpoint</a> * <p> * <b>This method is not threadsafe. An endpoint should be configured when the client is created and before any * service requests are made. Changing it afterwards creates inevitable race conditions for any service requests in * transit or retrying.</b> * * @param endpoint * The endpoint (ex: "elasticmapreduce.amazonaws.com") or a full URL, including the protocol (ex: * "https://elasticmapreduce.amazonaws.com") of the region specific AWS endpoint this client will communicate * with. * @deprecated use {@link AwsClientBuilder#setEndpointConfiguration(AwsClientBuilder.EndpointConfiguration)} for * example: * {@code builder.setEndpointConfiguration(new EndpointConfiguration(endpoint, signingRegion));} */ @Deprecated void setEndpoint(String endpoint); /** * An alternative to {@link AmazonElasticMapReduce#setEndpoint(String)}, sets the regional endpoint for this * client's service calls. Callers can use this method to control which AWS region they want to work with. * <p> * By default, all service endpoints in all regions use the https protocol. To use http instead, specify it in the * {@link ClientConfiguration} supplied at construction. * <p> * <b>This method is not threadsafe. 
A region should be configured when the client is created and before any service * requests are made. Changing it afterwards creates inevitable race conditions for any service requests in transit * or retrying.</b> * * @param region * The region this client will communicate with. See {@link Region#getRegion(com.amazonaws.regions.Regions)} * for accessing a given region. Must not be null and must be a region where the service is available. * * @see Region#getRegion(com.amazonaws.regions.Regions) * @see Region#createClient(Class, com.amazonaws.auth.AWSCredentialsProvider, ClientConfiguration) * @see Region#isServiceSupported(String) * @deprecated use {@link AwsClientBuilder#setRegion(String)} */ @Deprecated void setRegion(Region region); /** * <p> * Adds an instance fleet to a running cluster. * </p> * <note> * <p> * The instance fleet configuration is available only in Amazon EMR versions 4.8.0 and later, excluding 5.0.x. * </p> * </note> * * @param addInstanceFleetRequest * @return Result of the AddInstanceFleet operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.AddInstanceFleet * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/AddInstanceFleet" * target="_top">AWS API Documentation</a> */ AddInstanceFleetResult addInstanceFleet(AddInstanceFleetRequest addInstanceFleetRequest); /** * <p> * Adds one or more instance groups to a running cluster. * </p> * * @param addInstanceGroupsRequest * Input to an AddInstanceGroups call. * @return Result of the AddInstanceGroups operation returned by the service. * @throws InternalServerErrorException * Indicates that an error occurred while processing the request and that the request was not completed. 
* @sample AmazonElasticMapReduce.AddInstanceGroups * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/AddInstanceGroups" * target="_top">AWS API Documentation</a> */ AddInstanceGroupsResult addInstanceGroups(AddInstanceGroupsRequest addInstanceGroupsRequest); /** * <p> * AddJobFlowSteps adds new steps to a running cluster. A maximum of 256 steps are allowed in each job flow. * </p> * <p> * If your cluster is long-running (such as a Hive data warehouse) or complex, you may require more than 256 steps * to process your data. You can bypass the 256-step limitation in various ways, including using SSH to connect to * the master node and submitting queries directly to the software running on the master node, such as Hive and * Hadoop. For more information on how to do this, see <a * href="http://docs.aws.amazon.com/emr/latest/ManagementGuide/AddMoreThan256Steps.html">Add More than 256 Steps to * a Cluster</a> in the <i>Amazon EMR Management Guide</i>. * </p> * <p> * A step specifies the location of a JAR file stored either on the master node of the cluster or in Amazon S3. Each * step is performed by the main function of the main class of the JAR file. The main class can be specified either * in the manifest of the JAR or by using the MainFunction parameter of the step. * </p> * <p> * Amazon EMR executes each step in the order listed. For a step to be considered complete, the main function must * exit with a zero exit code and all Hadoop jobs started while the step was running must have completed and run * successfully. * </p> * <p> * You can only add steps to a cluster that is in one of the following states: STARTING, BOOTSTRAPPING, RUNNING, or * WAITING. * </p> * * @param addJobFlowStepsRequest * The input argument to the <a>AddJobFlowSteps</a> operation. * @return Result of the AddJobFlowSteps operation returned by the service. 
* @throws InternalServerErrorException * Indicates that an error occurred while processing the request and that the request was not completed. * @sample AmazonElasticMapReduce.AddJobFlowSteps * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/AddJobFlowSteps" * target="_top">AWS API Documentation</a> */ AddJobFlowStepsResult addJobFlowSteps(AddJobFlowStepsRequest addJobFlowStepsRequest); /** * <p> * Adds tags to an Amazon EMR resource. Tags make it easier to associate clusters in various ways, such as grouping * clusters to track your Amazon EMR resource allocation costs. For more information, see <a * href="http://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-tags.html">Tag Clusters</a>. * </p> * * @param addTagsRequest * This input identifies a cluster and a list of tags to attach. * @return Result of the AddTags operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.AddTags * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/AddTags" target="_top">AWS API * Documentation</a> */ AddTagsResult addTags(AddTagsRequest addTagsRequest); /** * <p> * Cancels a pending step or steps in a running cluster. Available only in Amazon EMR versions 4.8.0 and later, * excluding version 5.0.0. A maximum of 256 steps are allowed in each CancelSteps request. CancelSteps is * idempotent but asynchronous; it does not guarantee a step will be canceled, even if the request is successfully * submitted. You can only cancel steps that are in a <code>PENDING</code> state. * </p> * * @param cancelStepsRequest * The input argument to the <a>CancelSteps</a> operation. * @return Result of the CancelSteps operation returned by the service. 
* @throws InternalServerErrorException * Indicates that an error occurred while processing the request and that the request was not completed. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.CancelSteps * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/CancelSteps" target="_top">AWS * API Documentation</a> */ CancelStepsResult cancelSteps(CancelStepsRequest cancelStepsRequest); /** * <p> * Creates a security configuration, which is stored in the service and can be specified when a cluster is created. * </p> * * @param createSecurityConfigurationRequest * @return Result of the CreateSecurityConfiguration operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.CreateSecurityConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/CreateSecurityConfiguration" * target="_top">AWS API Documentation</a> */ CreateSecurityConfigurationResult createSecurityConfiguration(CreateSecurityConfigurationRequest createSecurityConfigurationRequest); /** * <p> * Deletes a security configuration. * </p> * * @param deleteSecurityConfigurationRequest * @return Result of the DeleteSecurityConfiguration operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. 
* @sample AmazonElasticMapReduce.DeleteSecurityConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/DeleteSecurityConfiguration" * target="_top">AWS API Documentation</a> */ DeleteSecurityConfigurationResult deleteSecurityConfiguration(DeleteSecurityConfigurationRequest deleteSecurityConfigurationRequest); /** * <p> * Provides cluster-level details including status, hardware and software configuration, VPC settings, and so on. * </p> * * @param describeClusterRequest * This input determines which cluster to describe. * @return Result of the DescribeCluster operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.DescribeCluster * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/DescribeCluster" * target="_top">AWS API Documentation</a> */ DescribeClusterResult describeCluster(DescribeClusterRequest describeClusterRequest); /** * <p> * This API is deprecated and will eventually be removed. We recommend you use <a>ListClusters</a>, * <a>DescribeCluster</a>, <a>ListSteps</a>, <a>ListInstanceGroups</a> and <a>ListBootstrapActions</a> instead. * </p> * <p> * DescribeJobFlows returns a list of job flows that match all of the supplied parameters. The parameters can * include a list of job flow IDs, job flow states, and restrictions on job flow creation date and time. * </p> * <p> * Regardless of supplied parameters, only job flows created within the last two months are returned. 
* </p> * <p> * If no parameters are supplied, then job flows matching either of the following criteria are returned: * </p> * <ul> * <li> * <p> * Job flows created and completed in the last two weeks * </p> * </li> * <li> * <p> * Job flows created within the last two months that are in one of the following states: <code>RUNNING</code>, * <code>WAITING</code>, <code>SHUTTING_DOWN</code>, <code>STARTING</code> * </p> * </li> * </ul> * <p> * Amazon EMR can return a maximum of 512 job flow descriptions. * </p> * * @param describeJobFlowsRequest * The input for the <a>DescribeJobFlows</a> operation. * @return Result of the DescribeJobFlows operation returned by the service. * @throws InternalServerErrorException * Indicates that an error occurred while processing the request and that the request was not completed. * @sample AmazonElasticMapReduce.DescribeJobFlows * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/DescribeJobFlows" * target="_top">AWS API Documentation</a> */ @Deprecated DescribeJobFlowsResult describeJobFlows(DescribeJobFlowsRequest describeJobFlowsRequest); /** * Simplified method form for invoking the DescribeJobFlows operation. * * @see #describeJobFlows(DescribeJobFlowsRequest) */ @Deprecated DescribeJobFlowsResult describeJobFlows(); /** * <p> * Provides the details of a security configuration by returning the configuration JSON. * </p> * * @param describeSecurityConfigurationRequest * @return Result of the DescribeSecurityConfiguration operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. 
* @sample AmazonElasticMapReduce.DescribeSecurityConfiguration * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/DescribeSecurityConfiguration" * target="_top">AWS API Documentation</a> */ DescribeSecurityConfigurationResult describeSecurityConfiguration(DescribeSecurityConfigurationRequest describeSecurityConfigurationRequest); /** * <p> * Provides more detail about the cluster step. * </p> * * @param describeStepRequest * This input determines which step to describe. * @return Result of the DescribeStep operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.DescribeStep * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/DescribeStep" target="_top">AWS * API Documentation</a> */ DescribeStepResult describeStep(DescribeStepRequest describeStepRequest); /** * <p> * Provides information about the bootstrap actions associated with a cluster. * </p> * * @param listBootstrapActionsRequest * This input determines which bootstrap actions to retrieve. * @return Result of the ListBootstrapActions operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.ListBootstrapActions * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ListBootstrapActions" * target="_top">AWS API Documentation</a> */ ListBootstrapActionsResult listBootstrapActions(ListBootstrapActionsRequest listBootstrapActionsRequest); /** * <p> * Provides the status of all clusters visible to this AWS account. 
Allows you to filter the list of clusters based * on certain criteria; for example, filtering by cluster creation date and time or by status. This call returns a * maximum of 50 clusters per call, but returns a marker to track the paging of the cluster list across multiple * ListClusters calls. * </p> * * @param listClustersRequest * This input determines how the ListClusters action filters the list of clusters that it returns. * @return Result of the ListClusters operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.ListClusters * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ListClusters" target="_top">AWS * API Documentation</a> */ ListClustersResult listClusters(ListClustersRequest listClustersRequest); /** * Simplified method form for invoking the ListClusters operation. * * @see #listClusters(ListClustersRequest) */ ListClustersResult listClusters(); /** * <p> * Lists all available details about the instance fleets in a cluster. * </p> * <note> * <p> * The instance fleet configuration is available only in Amazon EMR versions 4.8.0 and later, excluding 5.0.x * versions. * </p> * </note> * * @param listInstanceFleetsRequest * @return Result of the ListInstanceFleets operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. 
* @sample AmazonElasticMapReduce.ListInstanceFleets * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ListInstanceFleets" * target="_top">AWS API Documentation</a> */ ListInstanceFleetsResult listInstanceFleets(ListInstanceFleetsRequest listInstanceFleetsRequest); /** * <p> * Provides all available details about the instance groups in a cluster. * </p> * * @param listInstanceGroupsRequest * This input determines which instance groups to retrieve. * @return Result of the ListInstanceGroups operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.ListInstanceGroups * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ListInstanceGroups" * target="_top">AWS API Documentation</a> */ ListInstanceGroupsResult listInstanceGroups(ListInstanceGroupsRequest listInstanceGroupsRequest); /** * <p> * Provides information for all active EC2 instances and EC2 instances terminated in the last 30 days, up to a * maximum of 2,000. EC2 instances in any of the following states are considered active: AWAITING_FULFILLMENT, * PROVISIONING, BOOTSTRAPPING, RUNNING. * </p> * * @param listInstancesRequest * This input determines which instances to list. * @return Result of the ListInstances operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. 
* @sample AmazonElasticMapReduce.ListInstances * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ListInstances" target="_top">AWS * API Documentation</a> */ ListInstancesResult listInstances(ListInstancesRequest listInstancesRequest); /** * <p> * Lists all the security configurations visible to this account, providing their creation dates and times, and * their names. This call returns a maximum of 50 clusters per call, but returns a marker to track the paging of the * cluster list across multiple ListSecurityConfigurations calls. * </p> * * @param listSecurityConfigurationsRequest * @return Result of the ListSecurityConfigurations operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.ListSecurityConfigurations * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ListSecurityConfigurations" * target="_top">AWS API Documentation</a> */ ListSecurityConfigurationsResult listSecurityConfigurations(ListSecurityConfigurationsRequest listSecurityConfigurationsRequest); /** * <p> * Provides a list of steps for the cluster in reverse order unless you specify stepIds with the request. * </p> * * @param listStepsRequest * This input determines which steps to list. * @return Result of the ListSteps operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. 
* @sample AmazonElasticMapReduce.ListSteps * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ListSteps" target="_top">AWS API * Documentation</a> */ ListStepsResult listSteps(ListStepsRequest listStepsRequest); /** * <p> * Modifies the target On-Demand and target Spot capacities for the instance fleet with the specified * InstanceFleetID within the cluster specified using ClusterID. The call either succeeds or fails atomically. * </p> * <note> * <p> * The instance fleet configuration is available only in Amazon EMR versions 4.8.0 and later, excluding 5.0.x * versions. * </p> * </note> * * @param modifyInstanceFleetRequest * @return Result of the ModifyInstanceFleet operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.ModifyInstanceFleet * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ModifyInstanceFleet" * target="_top">AWS API Documentation</a> */ ModifyInstanceFleetResult modifyInstanceFleet(ModifyInstanceFleetRequest modifyInstanceFleetRequest); /** * <p> * ModifyInstanceGroups modifies the number of nodes and configuration settings of an instance group. The input * parameters include the new target instance count for the group and the instance group ID. The call will either * succeed or fail atomically. * </p> * * @param modifyInstanceGroupsRequest * Change the size of some instance groups. * @return Result of the ModifyInstanceGroups operation returned by the service. * @throws InternalServerErrorException * Indicates that an error occurred while processing the request and that the request was not completed. 
* @sample AmazonElasticMapReduce.ModifyInstanceGroups * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ModifyInstanceGroups" * target="_top">AWS API Documentation</a> */ ModifyInstanceGroupsResult modifyInstanceGroups(ModifyInstanceGroupsRequest modifyInstanceGroupsRequest); /** * Simplified method form for invoking the ModifyInstanceGroups operation. * * @see #modifyInstanceGroups(ModifyInstanceGroupsRequest) */ ModifyInstanceGroupsResult modifyInstanceGroups(); /** * <p> * Creates or updates an automatic scaling policy for a core instance group or task instance group in an Amazon EMR * cluster. The automatic scaling policy defines how an instance group dynamically adds and terminates EC2 instances * in response to the value of a CloudWatch metric. * </p> * * @param putAutoScalingPolicyRequest * @return Result of the PutAutoScalingPolicy operation returned by the service. * @sample AmazonElasticMapReduce.PutAutoScalingPolicy * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/PutAutoScalingPolicy" * target="_top">AWS API Documentation</a> */ PutAutoScalingPolicyResult putAutoScalingPolicy(PutAutoScalingPolicyRequest putAutoScalingPolicyRequest); /** * <p> * Removes an automatic scaling policy from a specified instance group within an EMR cluster. * </p> * * @param removeAutoScalingPolicyRequest * @return Result of the RemoveAutoScalingPolicy operation returned by the service. * @sample AmazonElasticMapReduce.RemoveAutoScalingPolicy * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/RemoveAutoScalingPolicy" * target="_top">AWS API Documentation</a> */ RemoveAutoScalingPolicyResult removeAutoScalingPolicy(RemoveAutoScalingPolicyRequest removeAutoScalingPolicyRequest); /** * <p> * Removes tags from an Amazon EMR resource. Tags make it easier to associate clusters in various ways, such as * grouping clusters to track your Amazon EMR resource allocation costs. 
For more information, see <a * href="http://docs.aws.amazon.com/emr/latest/ManagementGuide/emr-plan-tags.html">Tag Clusters</a>. * </p> * <p> * The following example removes the stack tag with value Prod from a cluster: * </p> * * @param removeTagsRequest * This input identifies a cluster and a list of tags to remove. * @return Result of the RemoveTags operation returned by the service. * @throws InternalServerException * This exception occurs when there is an internal failure in the EMR service. * @throws InvalidRequestException * This exception occurs when there is something wrong with user input. * @sample AmazonElasticMapReduce.RemoveTags * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/RemoveTags" target="_top">AWS * API Documentation</a> */ RemoveTagsResult removeTags(RemoveTagsRequest removeTagsRequest); /** * <p> * RunJobFlow creates and starts running a new cluster (job flow). The cluster runs the steps specified. After the * steps complete, the cluster stops and the HDFS partition is lost. To prevent loss of data, configure the last * step of the job flow to store results in Amazon S3. If the <a>JobFlowInstancesConfig</a> * <code>KeepJobFlowAliveWhenNoSteps</code> parameter is set to <code>TRUE</code>, the cluster transitions to the * WAITING state rather than shutting down after the steps have completed. * </p> * <p> * For additional protection, you can set the <a>JobFlowInstancesConfig</a> <code>TerminationProtected</code> * parameter to <code>TRUE</code> to lock the cluster and prevent it from being terminated by API call, user * intervention, or in the event of a job flow error. * </p> * <p> * A maximum of 256 steps are allowed in each job flow. * </p> * <p> * If your cluster is long-running (such as a Hive data warehouse) or complex, you may require more than 256 steps * to process your data. 
You can bypass the 256-step limitation in various ways, including using the SSH shell to * connect to the master node and submitting queries directly to the software running on the master node, such as * Hive and Hadoop. For more information on how to do this, see <a * href="http://docs.aws.amazon.com/emr/latest/ManagementGuide/AddMoreThan256Steps.html">Add More than 256 Steps to * a Cluster</a> in the <i>Amazon EMR Management Guide</i>. * </p> * <p> * For long running clusters, we recommend that you periodically store your results. * </p> * <note> * <p> * The instance fleets configuration is available only in Amazon EMR versions 4.8.0 and later, excluding 5.0.x * versions. The RunJobFlow request can contain InstanceFleets parameters or InstanceGroups parameters, but not * both. * </p> * </note> * * @param runJobFlowRequest * Input to the <a>RunJobFlow</a> operation. * @return Result of the RunJobFlow operation returned by the service. * @throws InternalServerErrorException * Indicates that an error occurred while processing the request and that the request was not completed. * @sample AmazonElasticMapReduce.RunJobFlow * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/RunJobFlow" target="_top">AWS * API Documentation</a> */ RunJobFlowResult runJobFlow(RunJobFlowRequest runJobFlowRequest); /** * <p> * SetTerminationProtection locks a cluster (job flow) so the EC2 instances in the cluster cannot be terminated by * user intervention, an API call, or in the event of a job-flow error. The cluster still terminates upon successful * completion of the job flow. Calling <code>SetTerminationProtection</code> on a cluster is similar to calling the * Amazon EC2 <code>DisableAPITermination</code> API on all EC2 instances in a cluster. 
* </p> * <p> * <code>SetTerminationProtection</code> is used to prevent accidental termination of a cluster and to ensure that * in the event of an error, the instances persist so that you can recover any data stored in their ephemeral * instance storage. * </p> * <p> * To terminate a cluster that has been locked by setting <code>SetTerminationProtection</code> to <code>true</code> * , you must first unlock the job flow by a subsequent call to <code>SetTerminationProtection</code> in which you * set the value to <code>false</code>. * </p> * <p> * For more information, see<a * href="http://docs.aws.amazon.com/emr/latest/ManagementGuide/UsingEMR_TerminationProtection.html">Managing Cluster * Termination</a> in the <i>Amazon EMR Management Guide</i>. * </p> * * @param setTerminationProtectionRequest * The input argument to the <a>TerminationProtection</a> operation. * @return Result of the SetTerminationProtection operation returned by the service. * @throws InternalServerErrorException * Indicates that an error occurred while processing the request and that the request was not completed. * @sample AmazonElasticMapReduce.SetTerminationProtection * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/SetTerminationProtection" * target="_top">AWS API Documentation</a> */ SetTerminationProtectionResult setTerminationProtection(SetTerminationProtectionRequest setTerminationProtectionRequest); /** * <p> * Sets whether all AWS Identity and Access Management (IAM) users under your account can access the specified * clusters (job flows). This action works on running clusters. You can also set the visibility of a cluster when * you launch it using the <code>VisibleToAllUsers</code> parameter of <a>RunJobFlow</a>. The SetVisibleToAllUsers * action can be called only by an IAM user who created the cluster or the AWS account that owns the cluster. * </p> * * @param setVisibleToAllUsersRequest * The input to the SetVisibleToAllUsers action. 
* @return Result of the SetVisibleToAllUsers operation returned by the service. * @throws InternalServerErrorException * Indicates that an error occurred while processing the request and that the request was not completed. * @sample AmazonElasticMapReduce.SetVisibleToAllUsers * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/SetVisibleToAllUsers" * target="_top">AWS API Documentation</a> */ SetVisibleToAllUsersResult setVisibleToAllUsers(SetVisibleToAllUsersRequest setVisibleToAllUsersRequest); /** * <p> * TerminateJobFlows shuts a list of clusters (job flows) down. When a job flow is shut down, any step not yet * completed is canceled and the EC2 instances on which the cluster is running are stopped. Any log files not * already saved are uploaded to Amazon S3 if a LogUri was specified when the cluster was created. * </p> * <p> * The maximum number of clusters allowed is 10. The call to <code>TerminateJobFlows</code> is asynchronous. * Depending on the configuration of the cluster, it may take up to 1-5 minutes for the cluster to completely * terminate and release allocated resources, such as Amazon EC2 instances. * </p> * * @param terminateJobFlowsRequest * Input to the <a>TerminateJobFlows</a> operation. * @return Result of the TerminateJobFlows operation returned by the service. * @throws InternalServerErrorException * Indicates that an error occurred while processing the request and that the request was not completed. * @sample AmazonElasticMapReduce.TerminateJobFlows * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/TerminateJobFlows" * target="_top">AWS API Documentation</a> */ TerminateJobFlowsResult terminateJobFlows(TerminateJobFlowsRequest terminateJobFlowsRequest); /** * Shuts down this client object, releasing any resources that might be held open. This is an optional method, and * callers are not expected to call it, but can if they want to explicitly release any open resources. 
Once a client * has been shutdown, it should not be used to make any more requests. */ void shutdown(); /** * Returns additional metadata for a previously executed successful request, typically used for debugging issues * where a service isn't acting as expected. This data isn't considered part of the result data returned by an * operation, so it's available through this separate, diagnostic interface. * <p> * Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic * information for an executed request, you should use this method to retrieve it as soon as possible after * executing a request. * * @param request * The originally executed request. * * @return The response metadata for the specified request, or null if none is available. */ ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request); AmazonElasticMapReduceWaiters waiters(); }
package com.semmle.js.ast; /** * An enhanced for statement, that is, either a {@link ForInStatement} or a {@link ForOfStatement}. */ public abstract class EnhancedForStatement extends Loop { private final Node left; private final Expression defaultValue; private final Expression right; public EnhancedForStatement( String type, SourceLocation loc, Node left, Expression right, Statement body) { super(type, loc, body); if (left instanceof AssignmentPattern) { AssignmentPattern ap = (AssignmentPattern) left; this.left = ap.getLeft(); this.defaultValue = ap.getRight(); } else { this.left = left; this.defaultValue = null; } this.right = right; } /** * The iterator variable of this statement; may be either a {@link VariableDeclaration} statement, * or an lvalue {@link Expression}. */ public Node getLeft() { return left; } /** Does the iterator variable of this statement have a default value? */ public boolean hasDefaultValue() { return defaultValue != null; } /** Get the default value of the iterator variable of this statement. */ public Expression getDefaultValue() { return defaultValue; } /** The expression this loop iterates over. */ public Expression getRight() { return right; } @Override public Node getContinueTarget() { return this; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pinot.common.config.provider;

import com.google.common.base.Preconditions;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import javax.annotation.Nullable;
import org.I0Itec.zkclient.IZkChildListener;
import org.I0Itec.zkclient.IZkDataListener;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.helix.AccessOption;
import org.apache.helix.ZNRecord;
import org.apache.helix.store.zk.ZkHelixPropertyStore;
import org.apache.pinot.common.request.Expression;
import org.apache.pinot.common.utils.SchemaUtils;
import org.apache.pinot.common.utils.config.TableConfigUtils;
import org.apache.pinot.spi.config.provider.PinotConfigProvider;
import org.apache.pinot.spi.config.provider.SchemaChangeListener;
import org.apache.pinot.spi.config.provider.TableConfigChangeListener;
import org.apache.pinot.spi.config.table.QueryConfig;
import org.apache.pinot.spi.config.table.TableConfig;
import org.apache.pinot.spi.data.DimensionFieldSpec;
import org.apache.pinot.spi.data.FieldSpec;
import org.apache.pinot.spi.data.Schema;
import org.apache.pinot.spi.utils.CommonConstants.Segment.BuiltInVirtualColumn;
import org.apache.pinot.spi.utils.builder.TableNameBuilder;
import org.apache.pinot.sql.parsers.CalciteSqlParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/**
 * An implementation of {@link PinotConfigProvider}.
 * The {@code TableCache} caches all the table configs and schemas within the cluster, and listens on ZK changes to keep
 * them in sync. It also maintains the table name map and the column name map for case-insensitive queries.
 */
public class TableCache implements PinotConfigProvider {
  private static final Logger LOGGER = LoggerFactory.getLogger(TableCache.class);
  private static final String TABLE_CONFIG_PARENT_PATH = "/CONFIGS/TABLE";
  private static final String TABLE_CONFIG_PATH_PREFIX = "/CONFIGS/TABLE/";
  private static final String SCHEMA_PARENT_PATH = "/SCHEMAS";
  private static final String SCHEMA_PATH_PREFIX = "/SCHEMAS/";
  private static final String LOWER_CASE_OFFLINE_TABLE_SUFFIX = "_offline";
  private static final String LOWER_CASE_REALTIME_TABLE_SUFFIX = "_realtime";

  // NOTE: No need to use concurrent set because it is always accessed within the ZK change listener lock
  private final Set<TableConfigChangeListener> _tableConfigChangeListeners = new HashSet<>();
  private final Set<SchemaChangeListener> _schemaChangeListeners = new HashSet<>();

  private final ZkHelixPropertyStore<ZNRecord> _propertyStore;
  private final boolean _caseInsensitive;

  private final ZkTableConfigChangeListener _zkTableConfigChangeListener = new ZkTableConfigChangeListener();
  // Key is table name with type suffix, value is table config info
  private final Map<String, TableConfigInfo> _tableConfigInfoMap = new ConcurrentHashMap<>();
  // Key is table name (with or without type suffix), value is schema name
  // It only stores table with schema name not matching the raw table name
  private final Map<String, String> _schemaNameMap = new ConcurrentHashMap<>();
  // Key is lower case table name (with or without type suffix), value is actual table name
  // For case-insensitive mode only
  private final Map<String, String> _tableNameMap;

  private final ZkSchemaChangeListener _zkSchemaChangeListener = new ZkSchemaChangeListener();
  // Key is schema name, value is schema info
  private final Map<String, SchemaInfo> _schemaInfoMap = new ConcurrentHashMap<>();

  public TableCache(ZkHelixPropertyStore<ZNRecord> propertyStore, boolean caseInsensitive) {
    _propertyStore = propertyStore;
    _caseInsensitive = caseInsensitive;
    _tableNameMap = caseInsensitive ? new ConcurrentHashMap<>() : null;

    synchronized (_zkTableConfigChangeListener) {
      // Subscribe child changes before reading the data to avoid missing changes
      _propertyStore.subscribeChildChanges(TABLE_CONFIG_PARENT_PATH, _zkTableConfigChangeListener);
      List<String> tables = _propertyStore.getChildNames(TABLE_CONFIG_PARENT_PATH, AccessOption.PERSISTENT);
      if (CollectionUtils.isNotEmpty(tables)) {
        List<String> pathsToAdd = new ArrayList<>(tables.size());
        for (String tableNameWithType : tables) {
          pathsToAdd.add(TABLE_CONFIG_PATH_PREFIX + tableNameWithType);
        }
        addTableConfigs(pathsToAdd);
      }
    }
    synchronized (_zkSchemaChangeListener) {
      // Subscribe child changes before reading the data to avoid missing changes
      _propertyStore.subscribeChildChanges(SCHEMA_PARENT_PATH, _zkSchemaChangeListener);
      List<String> tables = _propertyStore.getChildNames(SCHEMA_PARENT_PATH, AccessOption.PERSISTENT);
      if (CollectionUtils.isNotEmpty(tables)) {
        List<String> pathsToAdd = new ArrayList<>(tables.size());
        for (String rawTableName : tables) {
          pathsToAdd.add(SCHEMA_PATH_PREFIX + rawTableName);
        }
        addSchemas(pathsToAdd);
      }
    }

    LOGGER.info("Initialized TableCache with caseInsensitive: {}", caseInsensitive);
  }

  /**
   * Returns {@code true} if the TableCache is case-insensitive, {@code false} otherwise.
   */
  public boolean isCaseInsensitive() {
    return _caseInsensitive;
  }

  /**
   * For case-insensitive only, returns the actual table name for the given case-insensitive table name (with or
   * without type suffix), or {@code null} if the table does not exist.
   */
  @Nullable
  public String getActualTableName(String caseInsensitiveTableName) {
    Preconditions.checkState(_caseInsensitive, "TableCache is not case-insensitive");
    return _tableNameMap.get(caseInsensitiveTableName.toLowerCase());
  }

  /**
   * For case-insensitive only, returns a map from lower case column name to actual column name for the given table, or
   * {@code null} if the table schema does not exist.
   */
  @Nullable
  public Map<String, String> getColumnNameMap(String rawTableName) {
    String schemaName = _schemaNameMap.getOrDefault(rawTableName, rawTableName);
    // Fall back to the raw table name in case the configured schema has not been loaded (yet)
    SchemaInfo schemaInfo = _schemaInfoMap.getOrDefault(schemaName, _schemaInfoMap.get(rawTableName));
    return schemaInfo != null ? schemaInfo._columnNameMap : null;
  }

  /**
   * Returns the expression override map for the given table, or {@code null} if no override is configured.
   */
  @Nullable
  public Map<Expression, Expression> getExpressionOverrideMap(String tableNameWithType) {
    TableConfigInfo tableConfigInfo = _tableConfigInfoMap.get(tableNameWithType);
    return tableConfigInfo != null ? tableConfigInfo._expressionOverrideMap : null;
  }

  /**
   * Returns the table config for the given table, or {@code null} if it does not exist.
   */
  @Nullable
  @Override
  public TableConfig getTableConfig(String tableNameWithType) {
    TableConfigInfo tableConfigInfo = _tableConfigInfoMap.get(tableNameWithType);
    return tableConfigInfo != null ? tableConfigInfo._tableConfig : null;
  }

  @Override
  public boolean registerTableConfigChangeListener(TableConfigChangeListener tableConfigChangeListener) {
    synchronized (_zkTableConfigChangeListener) {
      boolean added = _tableConfigChangeListeners.add(tableConfigChangeListener);
      if (added) {
        // Deliver the current snapshot so the listener does not miss configs added before registration
        tableConfigChangeListener.onChange(getTableConfigs());
      }
      return added;
    }
  }

  /**
   * Returns the schema for the given table, or {@code null} if it does not exist.
   */
  @Nullable
  @Override
  public Schema getSchema(String rawTableName) {
    String schemaName = _schemaNameMap.getOrDefault(rawTableName, rawTableName);
    SchemaInfo schemaInfo = _schemaInfoMap.get(schemaName);
    return schemaInfo != null ? schemaInfo._schema : null;
  }

  @Override
  public boolean registerSchemaChangeListener(SchemaChangeListener schemaChangeListener) {
    synchronized (_zkSchemaChangeListener) {
      boolean added = _schemaChangeListeners.add(schemaChangeListener);
      if (added) {
        // Deliver the current snapshot so the listener does not miss schemas added before registration
        schemaChangeListener.onChange(getSchemas());
      }
      return added;
    }
  }

  /**
   * Reads and caches the table configs at the given property-store paths, subscribing to data changes for each.
   */
  private void addTableConfigs(List<String> paths) {
    // Subscribe data changes before reading the data to avoid missing changes
    for (String path : paths) {
      _propertyStore.subscribeDataChanges(path, _zkTableConfigChangeListener);
    }
    List<ZNRecord> znRecords = _propertyStore.get(paths, null, AccessOption.PERSISTENT);
    for (ZNRecord znRecord : znRecords) {
      if (znRecord != null) {
        try {
          putTableConfig(znRecord);
        } catch (Exception e) {
          LOGGER.error("Caught exception while adding table config for ZNRecord: {}", znRecord.getId(), e);
        }
      }
    }
  }

  /**
   * Caches a single table config and updates the schema-name and (case-insensitive) table-name maps accordingly.
   */
  private void putTableConfig(ZNRecord znRecord)
      throws IOException {
    TableConfig tableConfig = TableConfigUtils.fromZNRecord(znRecord);
    String tableNameWithType = tableConfig.getTableName();
    _tableConfigInfoMap.put(tableNameWithType, new TableConfigInfo(tableConfig));
    String schemaName = tableConfig.getValidationConfig().getSchemaName();
    String rawTableName = TableNameBuilder.extractRawTableName(tableNameWithType);
    // Track the schema name only when it differs from the raw table name
    if (schemaName != null && !schemaName.equals(rawTableName)) {
      _schemaNameMap.put(tableNameWithType, schemaName);
      _schemaNameMap.put(rawTableName, schemaName);
    } else {
      removeSchemaName(tableNameWithType);
    }
    if (_caseInsensitive) {
      _tableNameMap.put(tableNameWithType.toLowerCase(), tableNameWithType);
      _tableNameMap.put(rawTableName.toLowerCase(), rawTableName);
    }
  }

  /**
   * Evicts the table config at the given path and cleans up the derived maps. The raw table name entry is only removed
   * when the other table type (OFFLINE/REALTIME) is also absent.
   */
  private void removeTableConfig(String path) {
    _propertyStore.unsubscribeDataChanges(path, _zkTableConfigChangeListener);
    String tableNameWithType = path.substring(TABLE_CONFIG_PATH_PREFIX.length());
    _tableConfigInfoMap.remove(tableNameWithType);
    removeSchemaName(tableNameWithType);
    if (_caseInsensitive) {
      _tableNameMap.remove(tableNameWithType.toLowerCase());
      String lowerCaseRawTableName = TableNameBuilder.extractRawTableName(tableNameWithType).toLowerCase();
      if (TableNameBuilder.isOfflineTableResource(tableNameWithType)) {
        if (!_tableNameMap.containsKey(lowerCaseRawTableName + LOWER_CASE_REALTIME_TABLE_SUFFIX)) {
          _tableNameMap.remove(lowerCaseRawTableName);
        }
      } else {
        if (!_tableNameMap.containsKey(lowerCaseRawTableName + LOWER_CASE_OFFLINE_TABLE_SUFFIX)) {
          _tableNameMap.remove(lowerCaseRawTableName);
        }
      }
    }
  }

  /**
   * Removes the schema-name mapping for the given table. The raw table name entry is only removed when the other table
   * type (OFFLINE/REALTIME) no longer has a mapping.
   */
  private void removeSchemaName(String tableNameWithType) {
    if (_schemaNameMap.remove(tableNameWithType) != null) {
      String rawTableName = TableNameBuilder.extractRawTableName(tableNameWithType);
      if (TableNameBuilder.isOfflineTableResource(tableNameWithType)) {
        if (!_schemaNameMap.containsKey(TableNameBuilder.REALTIME.tableNameWithType(rawTableName))) {
          _schemaNameMap.remove(rawTableName);
        }
      } else {
        if (!_schemaNameMap.containsKey(TableNameBuilder.OFFLINE.tableNameWithType(rawTableName))) {
          _schemaNameMap.remove(rawTableName);
        }
      }
    }
  }

  /**
   * Reads and caches the schemas at the given property-store paths, subscribing to data changes for each.
   */
  private void addSchemas(List<String> paths) {
    // Subscribe data changes before reading the data to avoid missing changes
    for (String path : paths) {
      _propertyStore.subscribeDataChanges(path, _zkSchemaChangeListener);
    }
    List<ZNRecord> znRecords = _propertyStore.get(paths, null, AccessOption.PERSISTENT);
    for (ZNRecord znRecord : znRecords) {
      if (znRecord != null) {
        try {
          putSchema(znRecord);
        } catch (Exception e) {
          LOGGER.error("Caught exception while adding schema for ZNRecord: {}", znRecord.getId(), e);
        }
      }
    }
  }

  /**
   * Caches a single schema (with built-in virtual columns added) and builds its column name map.
   */
  private void putSchema(ZNRecord znRecord)
      throws IOException {
    Schema schema = SchemaUtils.fromZNRecord(znRecord);
    addBuiltInVirtualColumns(schema);
    String schemaName = schema.getSchemaName();
    Map<String, String> columnNameMap = new HashMap<>();
    if (_caseInsensitive) {
      for (String columnName : schema.getColumnNames()) {
        columnNameMap.put(columnName.toLowerCase(), columnName);
      }
    } else {
      for (String columnName : schema.getColumnNames()) {
        columnNameMap.put(columnName, columnName);
      }
    }
    _schemaInfoMap.put(schemaName, new SchemaInfo(schema, columnNameMap));
  }

  /**
   * Adds the built-in virtual columns to the schema.
   * NOTE: The virtual column provider class is not added.
   */
  private static void addBuiltInVirtualColumns(Schema schema) {
    if (!schema.hasColumn(BuiltInVirtualColumn.DOCID)) {
      schema.addField(new DimensionFieldSpec(BuiltInVirtualColumn.DOCID, FieldSpec.DataType.INT, true));
    }
    if (!schema.hasColumn(BuiltInVirtualColumn.HOSTNAME)) {
      schema.addField(new DimensionFieldSpec(BuiltInVirtualColumn.HOSTNAME, FieldSpec.DataType.STRING, true));
    }
    if (!schema.hasColumn(BuiltInVirtualColumn.SEGMENTNAME)) {
      schema.addField(new DimensionFieldSpec(BuiltInVirtualColumn.SEGMENTNAME, FieldSpec.DataType.STRING, true));
    }
  }

  /**
   * Evicts the schema at the given path and unsubscribes its data-change listener.
   */
  private void removeSchema(String path) {
    _propertyStore.unsubscribeDataChanges(path, _zkSchemaChangeListener);
    String schemaName = path.substring(SCHEMA_PATH_PREFIX.length());
    _schemaInfoMap.remove(schemaName);
  }

  private void notifyTableConfigChangeListeners() {
    if (!_tableConfigChangeListeners.isEmpty()) {
      List<TableConfig> tableConfigs = getTableConfigs();
      for (TableConfigChangeListener tableConfigChangeListener : _tableConfigChangeListeners) {
        tableConfigChangeListener.onChange(tableConfigs);
      }
    }
  }

  private List<TableConfig> getTableConfigs() {
    List<TableConfig> tableConfigs = new ArrayList<>(_tableConfigInfoMap.size());
    for (TableConfigInfo tableConfigInfo : _tableConfigInfoMap.values()) {
      tableConfigs.add(tableConfigInfo._tableConfig);
    }
    return tableConfigs;
  }

  private void notifySchemaChangeListeners() {
    if (!_schemaChangeListeners.isEmpty()) {
      List<Schema> schemas = getSchemas();
      for (SchemaChangeListener schemaChangeListener : _schemaChangeListeners) {
        schemaChangeListener.onChange(schemas);
      }
    }
  }

  private List<Schema> getSchemas() {
    List<Schema> schemas = new ArrayList<>(_schemaInfoMap.size());
    for (SchemaInfo schemaInfo : _schemaInfoMap.values()) {
      schemas.add(schemaInfo._schema);
    }
    return schemas;
  }

  private class ZkTableConfigChangeListener implements IZkChildListener, IZkDataListener {

    @Override
    public synchronized void handleChildChange(String path, List<String> tableNamesWithType) {
      if (CollectionUtils.isEmpty(tableNamesWithType)) {
        return;
      }

      // Only process new added table configs. Changed/removed table configs are handled by other callbacks.
      List<String> pathsToAdd = new ArrayList<>();
      for (String tableNameWithType : tableNamesWithType) {
        if (!_tableConfigInfoMap.containsKey(tableNameWithType)) {
          pathsToAdd.add(TABLE_CONFIG_PATH_PREFIX + tableNameWithType);
        }
      }
      if (!pathsToAdd.isEmpty()) {
        addTableConfigs(pathsToAdd);
      }
      notifyTableConfigChangeListeners();
    }

    @Override
    public synchronized void handleDataChange(String path, Object data) {
      if (data != null) {
        ZNRecord znRecord = (ZNRecord) data;
        try {
          putTableConfig(znRecord);
        } catch (Exception e) {
          LOGGER.error("Caught exception while refreshing table config for ZNRecord: {}", znRecord.getId(), e);
        }
        notifyTableConfigChangeListeners();
      }
    }

    @Override
    public synchronized void handleDataDeleted(String path) {
      // NOTE: The path here is the absolute ZK path instead of the relative path to the property store.
      String tableNameWithType = path.substring(path.lastIndexOf('/') + 1);
      removeTableConfig(TABLE_CONFIG_PATH_PREFIX + tableNameWithType);
      notifyTableConfigChangeListeners();
    }
  }

  private class ZkSchemaChangeListener implements IZkChildListener, IZkDataListener {

    @Override
    public synchronized void handleChildChange(String path, List<String> schemaNames) {
      if (CollectionUtils.isEmpty(schemaNames)) {
        return;
      }

      // Only process new added schemas. Changed/removed schemas are handled by other callbacks.
      List<String> pathsToAdd = new ArrayList<>();
      for (String schemaName : schemaNames) {
        if (!_schemaInfoMap.containsKey(schemaName)) {
          pathsToAdd.add(SCHEMA_PATH_PREFIX + schemaName);
        }
      }
      if (!pathsToAdd.isEmpty()) {
        addSchemas(pathsToAdd);
      }
      notifySchemaChangeListeners();
    }

    @Override
    public synchronized void handleDataChange(String path, Object data) {
      if (data != null) {
        ZNRecord znRecord = (ZNRecord) data;
        try {
          putSchema(znRecord);
        } catch (Exception e) {
          LOGGER.error("Caught exception while refreshing schema for ZNRecord: {}", znRecord.getId(), e);
        }
        notifySchemaChangeListeners();
      }
    }

    @Override
    public synchronized void handleDataDeleted(String path) {
      // NOTE: The path here is the absolute ZK path instead of the relative path to the property store.
      String schemaName = path.substring(path.lastIndexOf('/') + 1);
      removeSchema(SCHEMA_PATH_PREFIX + schemaName);
      notifySchemaChangeListeners();
    }
  }

  private static class TableConfigInfo {
    final TableConfig _tableConfig;
    final Map<Expression, Expression> _expressionOverrideMap;

    private TableConfigInfo(TableConfig tableConfig) {
      _tableConfig = tableConfig;
      QueryConfig queryConfig = tableConfig.getQueryConfig();
      if (queryConfig != null && MapUtils.isNotEmpty(queryConfig.getExpressionOverrideMap())) {
        Map<Expression, Expression> expressionOverrideMap = new TreeMap<>();
        for (Map.Entry<String, String> entry : queryConfig.getExpressionOverrideMap().entrySet()) {
          try {
            Expression srcExp = CalciteSqlParser.compileToExpression(entry.getKey());
            Expression destExp = CalciteSqlParser.compileToExpression(entry.getValue());
            expressionOverrideMap.put(srcExp, destExp);
          } catch (Exception e) {
            // BUG FIX: pass the exception as the last argument so SLF4J logs the stack trace; previously it was
            // silently dropped, hiding why the expression override failed to compile.
            LOGGER.warn("Caught exception while compiling expression override: {} -> {} for table: {}, skipping it",
                entry.getKey(), entry.getValue(), tableConfig.getTableName(), e);
          }
        }
        int mapSize = expressionOverrideMap.size();
        if (mapSize == 0) {
          _expressionOverrideMap = null;
        } else if (mapSize == 1) {
          Map.Entry<Expression, Expression> entry = expressionOverrideMap.entrySet().iterator().next();
          _expressionOverrideMap = Collections.singletonMap(entry.getKey(), entry.getValue());
        } else {
          _expressionOverrideMap = expressionOverrideMap;
        }
      } else {
        _expressionOverrideMap = null;
      }
    }
  }

  private static class SchemaInfo {
    final Schema _schema;
    final Map<String, String> _columnNameMap;

    private SchemaInfo(Schema schema, Map<String, String> columnNameMap) {
      _schema = schema;
      _columnNameMap = columnNameMap;
    }
  }
}
/*
 * Copyright 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package androidx.car.widget;

import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.contrib.RecyclerViewActions.actionOnItemAtPosition;
import static androidx.test.espresso.contrib.RecyclerViewActions.scrollToPosition;
import static androidx.test.espresso.matcher.ViewMatchers.withId;

import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsEqual.equalTo;
import static org.hamcrest.number.IsCloseTo.closeTo;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import android.content.pm.PackageManager;
import android.graphics.drawable.Drawable;
import android.text.InputFilter;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewGroup;

import androidx.car.test.R;
import androidx.car.util.CarUxRestrictionsTestUtils;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.espresso.UiController;
import androidx.test.espresso.ViewAction;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.LargeTest;
import androidx.test.rule.ActivityTestRule;

import org.hamcrest.Matcher;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;

/**
 * Tests the layout configuration in {@link ActionListItem}.
 *
 * <p>Each test builds one or more {@link ActionListItem}s, binds them through a
 * {@link PagedListView} hosted by {@link PagedListViewTestActivity}, and asserts on the
 * resulting view hierarchy. The suite only runs on automotive devices (see {@link #setUp()}).
 */
@RunWith(AndroidJUnit4.class)
@LargeTest
public class ActionListItemTest {

    @Rule
    public ActivityTestRule<PagedListViewTestActivity> mActivityRule =
            new ActivityTestRule<>(PagedListViewTestActivity.class);

    private PagedListViewTestActivity mActivity;
    private PagedListView mPagedListView;
    private ListItemAdapter mAdapter;

    @Before
    public void setUp() {
        // These layouts are only meaningful on automotive builds; skip elsewhere.
        Assume.assumeTrue(isAutoDevice());
        mActivity = mActivityRule.getActivity();
        mPagedListView = mActivity.findViewById(R.id.paged_list_view);
    }

    @Test
    public void testPrimaryActionVisible() {
        ActionListItem largeIcon = new ActionListItem(mActivity);
        largeIcon.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_LARGE);

        ActionListItem mediumIcon = new ActionListItem(mActivity);
        mediumIcon.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_MEDIUM);

        ActionListItem smallIcon = new ActionListItem(mActivity);
        smallIcon.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_SMALL);

        List<ActionListItem> items = Arrays.asList(largeIcon, mediumIcon, smallIcon);
        setupPagedListView(items);

        assertThat(getViewHolderAtPosition(0).getPrimaryIcon().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(getViewHolderAtPosition(1).getPrimaryIcon().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(getViewHolderAtPosition(2).getPrimaryIcon().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testTextVisible() {
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setTitle("title");

        ActionListItem item1 = new ActionListItem(mActivity);
        item1.setBody("body");

        List<ActionListItem> items = Arrays.asList(item0, item1);
        setupPagedListView(items);

        assertThat(getViewHolderAtPosition(0).getTitle().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(getViewHolderAtPosition(1).getBody().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testTextStartMarginMatchesPrimaryActionType() {
        ActionListItem largeIcon = new ActionListItem(mActivity);
        largeIcon.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_LARGE);

        ActionListItem mediumIcon = new ActionListItem(mActivity);
        mediumIcon.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_MEDIUM);

        ActionListItem smallIcon = new ActionListItem(mActivity);
        smallIcon.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_SMALL);

        ActionListItem emptyIcon = new ActionListItem(mActivity);
        emptyIcon.setPrimaryActionEmptyIcon();

        ActionListItem noIcon = new ActionListItem(mActivity);
        noIcon.setPrimaryActionNoIcon();

        List<ActionListItem> items = Arrays.asList(
                largeIcon, mediumIcon, smallIcon, emptyIcon, noIcon);
        List<Integer> expectedStartMargin = Arrays.asList(
                R.dimen.car_keyline_4,  // Large icon.
                R.dimen.car_keyline_3,  // Medium icon.
                R.dimen.car_keyline_3,  // Small icon.
                R.dimen.car_keyline_3,  // Empty icon.
                R.dimen.car_keyline_1); // No icon.
        setupPagedListView(items);

        for (int i = 0; i < items.size(); i++) {
            ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(i);

            int expected = ApplicationProvider.getApplicationContext().getResources()
                    .getDimensionPixelSize(expectedStartMargin.get(i));
            assertThat(((ViewGroup.MarginLayoutParams) viewHolder.getTitle().getLayoutParams())
                    .getMarginStart(), is(equalTo(expected)));
            assertThat(((ViewGroup.MarginLayoutParams) viewHolder.getBody().getLayoutParams())
                    .getMarginStart(), is(equalTo(expected)));
        }
    }

    @Test
    public void testPrimaryActionButtonVisibility_withDividers_Borderless() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setPrimaryAction("text", /* showDivider= */ true, v -> { /* Do nothing. */ });

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessPrimaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getBorderedPrimaryAction().getVisibility(), is(equalTo(View.GONE)));
        assertThat(viewHolder.getPrimaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryActionDivider().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testPrimaryActionButtonVisibility_withDividers() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setPrimaryAction("text", /* showDivider= */ true, v -> { /* Do nothing. */ });
        item.setActionBorderless(false);

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessPrimaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getBorderedPrimaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryActionDivider().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testSecondaryActionButtonVisibility_withDividers_Borderless() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setSecondaryAction("text", /* showDivider= */ true, v -> { /* Do nothing. */ });

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessSecondaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getBorderedSecondaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getSecondaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        // Fixed copy-paste: the divider visibility must be checked, not the action twice.
        assertThat(viewHolder.getSecondaryActionDivider().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testSecondaryActionButtonVisibility_withDividers() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setSecondaryAction("text", /* showDivider= */ true, v -> { /* Do nothing. */ });
        item.setActionBorderless(false);

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessSecondaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getBorderedSecondaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getSecondaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        // Fixed copy-paste: the divider visibility must be checked, not the action twice.
        assertThat(viewHolder.getSecondaryActionDivider().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testTwoActionButtonsVisibility_withDividers_Borderless() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setPrimaryAction("text", /* showDivider= */ true, v -> { /* Do nothing. */ });
        item.setSecondaryAction("text", /* showDivider= */ true, v -> { /* Do nothing. */ });

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessPrimaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getBorderedPrimaryAction().getVisibility(), is(equalTo(View.GONE)));
        assertThat(viewHolder.getPrimaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryActionDivider().getVisibility(),
                is(equalTo(View.VISIBLE)));

        assertThat(viewHolder.getSecondaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getBorderlessSecondaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getBorderedSecondaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getSecondaryActionDivider().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testTwoActionButtonsVisibility_withDividers() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setPrimaryAction("text", /* showDivider= */ true, v -> { /* Do nothing. */ });
        item.setSecondaryAction("text", /* showDivider= */ true, v -> { /* Do nothing. */ });
        item.setActionBorderless(false);

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessPrimaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getBorderedPrimaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryActionDivider().getVisibility(),
                is(equalTo(View.VISIBLE)));

        assertThat(viewHolder.getSecondaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getBorderlessSecondaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getBorderedSecondaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getSecondaryActionDivider().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testSingleActionButtonVisibility_noDividers_Borderless() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setPrimaryAction("text", /* showDivider= */ false, v -> { /* Do nothing. */ });

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessPrimaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getBorderedPrimaryAction().getVisibility(), is(equalTo(View.GONE)));
        assertThat(viewHolder.getPrimaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryActionDivider().getVisibility(), is(equalTo(View.GONE)));
    }

    @Test
    public void testSingleActionButtonVisibility_noDividers() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setPrimaryAction("text", /* showDivider= */ false, v -> { /* Do nothing. */ });
        item.setActionBorderless(false);

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessPrimaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getBorderedPrimaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryActionDivider().getVisibility(), is(equalTo(View.GONE)));
    }

    @Test
    public void testTwoActionButtonsVisibility_noDividers_Borderless() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setPrimaryAction("text", /* showDivider= */ false, v -> { /* Do nothing. */ });
        item.setSecondaryAction("text", /* showDivider= */ false, v -> { /* Do nothing. */ });

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessPrimaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getBorderedPrimaryAction().getVisibility(), is(equalTo(View.GONE)));
        assertThat(viewHolder.getPrimaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryActionDivider().getVisibility(), is(equalTo(View.GONE)));

        assertThat(viewHolder.getBorderlessSecondaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getBorderedSecondaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getSecondaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getSecondaryActionDivider().getVisibility(),
                is(equalTo(View.GONE)));
    }

    @Test
    public void testTwoActionButtonsVisibility_noDividers() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setPrimaryAction("text", /* showDivider= */ false, v -> { /* Do nothing. */ });
        item.setSecondaryAction("text", /* showDivider= */ false, v -> { /* Do nothing. */ });
        item.setActionBorderless(false);

        List<ActionListItem> items = Arrays.asList(item);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBorderlessPrimaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getBorderedPrimaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getPrimaryActionDivider().getVisibility(), is(equalTo(View.GONE)));

        assertThat(viewHolder.getBorderlessSecondaryAction().getVisibility(),
                is(equalTo(View.GONE)));
        assertThat(viewHolder.getBorderedSecondaryAction().getVisibility(),
                is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getSecondaryAction().getVisibility(), is(equalTo(View.VISIBLE)));
        assertThat(viewHolder.getSecondaryActionDivider().getVisibility(),
                is(equalTo(View.GONE)));
    }

    @Test
    public void testClickInterceptor_ClickableIfOneActionSet() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setEnabled(true);
        item.setPrimaryAction("text", /* showDivider= */ true, v -> { });

        setupPagedListView(Arrays.asList(item));

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertTrue(viewHolder.getClickInterceptView().isClickable());
    }

    @Test
    public void testClickInterceptor_VisibleIfOneActionSet() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setEnabled(true);
        item.setPrimaryAction("text", /* showDivider= */ true, v -> { });

        setupPagedListView(Arrays.asList(item));

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        // assertThat gives a descriptive failure message, unlike assertTrue(x == y).
        assertThat(viewHolder.getClickInterceptView().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testClickInterceptor_ClickableIfTwoActionsSet() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setEnabled(true);
        item.setPrimaryAction("text", /* showDivider= */ true, v -> { });
        item.setSecondaryAction("text", /* showDivider= */ true, v -> { });

        setupPagedListView(Arrays.asList(item));

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertTrue(viewHolder.getClickInterceptView().isClickable());
    }

    @Test
    public void testClickInterceptor_VisibleIfTwoActionsSet() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setEnabled(true);
        item.setPrimaryAction("text", /* showDivider= */ true, v -> { });
        item.setSecondaryAction("text", /* showDivider= */ true, v -> { });

        setupPagedListView(Arrays.asList(item));

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        // assertThat gives a descriptive failure message, unlike assertTrue(x == y).
        assertThat(viewHolder.getClickInterceptView().getVisibility(),
                is(equalTo(View.VISIBLE)));
    }

    @Test
    public void testItemWithOnlyTitleIsSingleLine() {
        // Only space.
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setTitle(" ");

        // Underscore.
        ActionListItem item1 = new ActionListItem(mActivity);
        item1.setTitle("______");

        ActionListItem item2 = new ActionListItem(mActivity);
        item2.setTitle("ALL UPPER CASE");

        // String wouldn't fit in one line.
        ActionListItem item3 = new ActionListItem(mActivity);
        item3.setTitle(ApplicationProvider.getApplicationContext().getResources().getString(
                R.string.over_uxr_text_length_limit));

        List<ActionListItem> items = Arrays.asList(item0, item1, item2, item3);
        setupPagedListView(items);

        double singleLineHeight =
                ApplicationProvider.getApplicationContext().getResources().getDimension(
                        R.dimen.car_single_line_list_item_height);

        LinearLayoutManager layoutManager =
                (LinearLayoutManager) mPagedListView.getRecyclerView().getLayoutManager();
        for (int i = 0; i < items.size(); i++) {
            assertThat((double) layoutManager.findViewByPosition(i).getHeight(),
                    is(closeTo(singleLineHeight, 1.0d)));
        }
    }

    @Test
    public void testItemWithBodyTextIsAtLeastDoubleLine() {
        // Only space.
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setBody(" ");

        // Underscore.
        ActionListItem item1 = new ActionListItem(mActivity);
        item1.setBody("____");

        // String wouldn't fit in one line.
        ActionListItem item2 = new ActionListItem(mActivity);
        item2.setBody(ApplicationProvider.getApplicationContext().getResources().getString(
                R.string.over_uxr_text_length_limit));

        List<ActionListItem> items = Arrays.asList(item0, item1, item2);
        setupPagedListView(items);

        final int doubleLineHeight =
                (int) ApplicationProvider.getApplicationContext().getResources().getDimension(
                        R.dimen.car_double_line_list_item_height);

        LinearLayoutManager layoutManager =
                (LinearLayoutManager) mPagedListView.getRecyclerView().getLayoutManager();
        for (int i = 0; i < items.size(); i++) {
            assertThat(layoutManager.findViewByPosition(i).getHeight(),
                    is(greaterThanOrEqualTo(doubleLineHeight)));
        }
    }

    @Test
    public void testPrimaryIconDrawable() {
        Drawable drawable =
                ApplicationProvider.getApplicationContext().getResources().getDrawable(
                        android.R.drawable.sym_def_app_icon, null);

        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setPrimaryActionIcon(drawable, ActionListItem.PRIMARY_ACTION_ICON_SIZE_LARGE);

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        assertTrue(getViewHolderAtPosition(0).getPrimaryIcon().getDrawable().getConstantState()
                .equals(drawable.getConstantState()));
    }

    @Test
    public void testPrimaryIconSizesInIncreasingOrder() {
        ActionListItem small = new ActionListItem(mActivity);
        small.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_SMALL);

        ActionListItem medium = new ActionListItem(mActivity);
        medium.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_MEDIUM);

        ActionListItem large = new ActionListItem(mActivity);
        large.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_LARGE);

        List<ActionListItem> items = Arrays.asList(small, medium, large);
        setupPagedListView(items);

        ActionListItem.ViewHolder smallVH = getViewHolderAtPosition(0);
        ActionListItem.ViewHolder mediumVH = getViewHolderAtPosition(1);
        ActionListItem.ViewHolder largeVH = getViewHolderAtPosition(2);

        assertThat(largeVH.getPrimaryIcon().getHeight(), is(greaterThan(
                mediumVH.getPrimaryIcon().getHeight())));
        assertThat(mediumVH.getPrimaryIcon().getHeight(), is(greaterThan(
                smallVH.getPrimaryIcon().getHeight())));
    }

    @Test
    public void testLargePrimaryIconHasNoStartMargin() {
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_LARGE);

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(((ViewGroup.MarginLayoutParams) viewHolder.getPrimaryIcon().getLayoutParams())
                .getMarginStart(), is(equalTo(0)));
    }

    @Test
    public void testSmallAndMediumPrimaryIconStartMargin() {
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_SMALL);

        ActionListItem item1 = new ActionListItem(mActivity);
        item1.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_MEDIUM);

        List<ActionListItem> items = Arrays.asList(item0, item1);
        setupPagedListView(items);

        int expected =
                ApplicationProvider.getApplicationContext().getResources().getDimensionPixelSize(
                        R.dimen.car_keyline_1);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(((ViewGroup.MarginLayoutParams) viewHolder.getPrimaryIcon().getLayoutParams())
                .getMarginStart(), is(equalTo(expected)));

        viewHolder = getViewHolderAtPosition(1);
        assertThat(((ViewGroup.MarginLayoutParams) viewHolder.getPrimaryIcon().getLayoutParams())
                .getMarginStart(), is(equalTo(expected)));
    }

    @Test
    public void testSmallPrimaryIconTopMarginRemainsTheSameRegardlessOfTextLength() {
        final String longText =
                ApplicationProvider.getApplicationContext().getResources().getString(
                        R.string.over_uxr_text_length_limit);

        // Single line item.
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_SMALL);
        item0.setTitle("one line text");

        // Double line item with one line text.
        ActionListItem item1 = new ActionListItem(mActivity);
        item1.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_SMALL);
        item1.setTitle("one line text");
        item1.setBody("one line text");

        // Double line item with long text.
        ActionListItem item2 = new ActionListItem(mActivity);
        item2.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_SMALL);
        item2.setTitle("one line text");
        item2.setBody(longText);

        // Body text only - long text.
        ActionListItem item3 = new ActionListItem(mActivity);
        item3.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_SMALL);
        item3.setBody(longText);

        // Body text only - one line text.
        ActionListItem item4 = new ActionListItem(mActivity);
        item4.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon,
                ActionListItem.PRIMARY_ACTION_ICON_SIZE_SMALL);
        item4.setBody("one line text");

        List<ActionListItem> items = Arrays.asList(item0, item1, item2, item3, item4);
        setupPagedListView(items);

        for (int i = 1; i < items.size(); i++) {
            onView(withId(R.id.recycler_view)).perform(scrollToPosition(i));
            // Implementation uses integer division so it may be off by 1 vs centered vertically.
            assertThat((double) getViewHolderAtPosition(i - 1).getPrimaryIcon().getTop(),
                    is(closeTo(
                            (double) getViewHolderAtPosition(i).getPrimaryIcon().getTop(), 1.0d)));
        }
    }

    @Test
    public void testItemNotClickableWithNullOnClickListener() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setOnClickListener(null);

        setupPagedListView(Arrays.asList(item));

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertFalse(viewHolder.itemView.isClickable());
    }

    @Test
    public void testClickingSupplementalAction_Borderless() {
        final boolean[] clicked = {false};

        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setPrimaryAction("action", /* showDivider= */ true, v -> clicked[0] = true);

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        onView(withId(R.id.recycler_view)).perform(
                actionOnItemAtPosition(0, clickChildViewWithId(R.id.primary_action_borderless)));
        assertTrue(clicked[0]);
    }

    @Test
    public void testClickingSupplementalAction() {
        final boolean[] clicked = {false};

        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setPrimaryAction("action", /* showDivider= */ true, v -> clicked[0] = true);
        item0.setActionBorderless(false);

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        onView(withId(R.id.recycler_view)).perform(
                actionOnItemAtPosition(0, clickChildViewWithId(R.id.primary_action)));
        assertTrue(clicked[0]);
    }

    @Test
    public void testClickingBothSupplementalActions_Borderless() {
        final boolean[] clicked = {false, false};

        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setPrimaryAction("action 1", /* showDivider= */ true, v -> clicked[0] = true);
        item0.setSecondaryAction("action 2", /* showDivider= */ true, v -> clicked[1] = true);

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        onView(withId(R.id.recycler_view)).perform(
                actionOnItemAtPosition(0, clickChildViewWithId(R.id.primary_action_borderless)));
        assertTrue(clicked[0]);
        assertFalse(clicked[1]);

        onView(withId(R.id.recycler_view)).perform(
                actionOnItemAtPosition(0, clickChildViewWithId(R.id.secondary_action_borderless)));
        assertTrue(clicked[1]);
    }

    @Test
    public void testClickingBothSupplementalActions() {
        final boolean[] clicked = {false, false};

        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setPrimaryAction("action 1", /* showDivider= */ true, v -> clicked[0] = true);
        item0.setSecondaryAction("action 2", /* showDivider= */ true, v -> clicked[1] = true);
        item0.setActionBorderless(false);

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        onView(withId(R.id.recycler_view)).perform(
                actionOnItemAtPosition(0, clickChildViewWithId(R.id.primary_action)));
        assertTrue(clicked[0]);
        assertFalse(clicked[1]);

        onView(withId(R.id.recycler_view)).perform(
                actionOnItemAtPosition(0, clickChildViewWithId(R.id.secondary_action)));
        assertTrue(clicked[1]);
    }

    @Test
    public void testCustomViewBinderBindsLast() {
        final String updatedTitle = "updated title";

        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setTitle("original title");
        item0.addViewBinder((viewHolder) -> viewHolder.getTitle().setText(updatedTitle));

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getTitle().getText(), is(equalTo(updatedTitle)));
    }

    @Test
    public void testCustomViewBinderOnUnusedViewsHasNoEffect() {
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.addViewBinder((viewHolder) -> viewHolder.getBody().setText("text"));

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getBody().getVisibility(), is(equalTo(View.GONE)));
        // Custom binder interacts with body but has no effect.
        // Expect card height to remain single line.
        assertThat((double) viewHolder.itemView.getHeight(), is(closeTo(
                ApplicationProvider.getApplicationContext().getResources().getDimension(
                        R.dimen.car_single_line_list_item_height), 1.0d)));
    }

    @Test
    public void testRevertingViewBinder() throws Throwable {
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setBody("one item");
        item0.addViewBinder(
                (viewHolder) -> viewHolder.getBody().setEllipsize(TextUtils.TruncateAt.END),
                (viewHolder -> viewHolder.getBody().setEllipsize(null)));

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);

        // Bind view holder to a new item - the customization made by item0 should be reverted.
        ActionListItem item1 = new ActionListItem(mActivity);
        item1.setBody("new item");
        mActivityRule.runOnUiThread(() -> item1.bind(viewHolder));

        assertThat(viewHolder.getBody().getEllipsize(), is(equalTo(null)));
    }

    @Test
    public void testRemovingViewBinder() {
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setBody("one item");
        ListItem.ViewBinder<ActionListItem.ViewHolder> binder =
                (viewHolder) -> viewHolder.getTitle().setEllipsize(TextUtils.TruncateAt.END);
        item0.addViewBinder(binder);

        assertTrue(item0.removeViewBinder(binder));

        List<ActionListItem> items = Arrays.asList(item0);
        setupPagedListView(items);

        assertThat(getViewHolderAtPosition(0).getBody().getEllipsize(), is(equalTo(null)));
    }

    @Test
    public void testNoCarriedOverOnClickListener() throws Throwable {
        boolean[] clicked = new boolean[] {false};
        ActionListItem item0 = new ActionListItem(mActivity);
        item0.setOnClickListener(v -> clicked[0] = true);

        setupPagedListView(Arrays.asList(item0));

        onView(withId(R.id.recycler_view)).perform(
                actionOnItemAtPosition(0, click()));
        assertTrue(clicked[0]);

        // item1 does not have onClickListener.
        ActionListItem item1 = new ActionListItem(mActivity);
        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        // Manually rebind the view holder.
        mActivityRule.runOnUiThread(() -> item1.bind(viewHolder));

        // Reset for testing.
        clicked[0] = false;
        onView(withId(R.id.recycler_view)).perform(
                actionOnItemAtPosition(0, click()));
        assertFalse(clicked[0]);
    }

    @Test
    public void testUpdateItem() {
        ActionListItem item = new ActionListItem(mActivity);
        setupPagedListView(Arrays.asList(item));

        String title = "updated title";
        item.setTitle(title);

        refreshUi();

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertThat(viewHolder.getTitle().getText(), is(equalTo(title)));
    }

    @Test
    public void testUxRestrictionsChange() {
        String longText = mActivity.getString(
                R.string.over_uxr_text_length_limit);
        ActionListItem item = new ActionListItem(mActivity);
        item.setBody(longText);

        setupPagedListView(Arrays.asList(item));

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        // Default behavior without UXR is unrestricted.
        assertThat(viewHolder.getBody().getText(), is(equalTo(longText)));

        viewHolder.onUxRestrictionsChanged(CarUxRestrictionsTestUtils.getFullyRestricted());
        refreshUi();

        // Verify that the body text length is limited.
        assertThat(viewHolder.getBody().getText().length(), is(lessThan(longText.length())));
    }

    @Test
    public void testUxRestrictionsChangesDoNotAlterExistingInputFilters() {
        InputFilter filter = new InputFilter.AllCaps(Locale.US);
        String bodyText = "bodytext";

        ActionListItem item = new ActionListItem(mActivity);
        item.setBody(bodyText);
        item.addViewBinder(vh -> vh.getBody().setFilters(new InputFilter[] {filter}));

        setupPagedListView(Arrays.asList(item));

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);

        // Toggle UX restrictions between fully restricted and unrestricted should not affect
        // existing filters.
        viewHolder.onUxRestrictionsChanged(CarUxRestrictionsTestUtils.getFullyRestricted());
        refreshUi();
        assertTrue(Arrays.asList(viewHolder.getBody().getFilters()).contains(filter));

        viewHolder.onUxRestrictionsChanged(CarUxRestrictionsTestUtils.getBaseline());
        refreshUi();
        assertTrue(Arrays.asList(viewHolder.getBody().getFilters()).contains(filter));
    }

    @Test
    public void testDisabledItemDisablesViewHolder() {
        ActionListItem item = new ActionListItem(mActivity);
        item.setOnClickListener(v -> { });
        item.setTitle("title");
        item.setBody("body");
        item.setPrimaryAction("action", false, v -> { });
        item.setEnabled(false);

        setupPagedListView(Arrays.asList(item));

        ActionListItem.ViewHolder viewHolder = getViewHolderAtPosition(0);
        assertFalse(viewHolder.getTitle().isEnabled());
        assertFalse(viewHolder.getBody().isEnabled());
        assertFalse(viewHolder.getPrimaryAction().isEnabled());
    }

    @Test
    public void testDisabledItemDoesNotRespondToClick() {
        // Disabled view will not respond to touch event.
        // Current test setup makes it hard to test, since clickChildViewWithId() directly calls
        // performClick() on a view, bypassing the way UI handles disabled state.

        // We are explicitly setting itemView so test it here.
        boolean[] clicked = new boolean[]{false};
        ActionListItem item = new ActionListItem(mActivity);
        item.setOnClickListener(v -> clicked[0] = true);
        item.setEnabled(false);

        setupPagedListView(Arrays.asList(item));

        onView(withId(R.id.recycler_view)).perform(
                actionOnItemAtPosition(0, click()));

        assertFalse(clicked[0]);
    }

    /** Returns {@code true} when running on an automotive device. */
    private boolean isAutoDevice() {
        PackageManager packageManager = mActivityRule.getActivity().getPackageManager();
        return packageManager.hasSystemFeature(PackageManager.FEATURE_AUTOMOTIVE);
    }

    /**
     * Rebinds the adapter on the UI thread and blocks until the list has laid out again.
     * Any failure inside the UI-thread runnable is rethrown to fail the test.
     */
    private void refreshUi() {
        try {
            mActivityRule.runOnUiThread(() -> {
                mAdapter.notifyDataSetChanged();
            });
        } catch (Throwable throwable) {
            throwable.printStackTrace();
            throw new RuntimeException(throwable);
        }
        // Wait for paged list view to layout by using espresso to scroll to a position.
        onView(withId(R.id.recycler_view)).perform(scrollToPosition(0));
    }

    /**
     * Installs {@code items} into the {@link PagedListView} under test via a fresh
     * {@link ListItemAdapter}, then waits for layout to settle.
     */
    private void setupPagedListView(List<ActionListItem> items) {
        ListItemProvider provider = new ListItemProvider.ListProvider(
                new ArrayList<>(items));
        try {
            mAdapter = new ListItemAdapter(mActivity, provider);
            mActivityRule.runOnUiThread(() -> {
                mPagedListView.setAdapter(mAdapter);
            });
        } catch (Throwable throwable) {
            throwable.printStackTrace();
            throw new RuntimeException(throwable);
        }
        refreshUi();
    }

    /** Returns the bound {@link ActionListItem.ViewHolder} at the given adapter position. */
    private ActionListItem.ViewHolder getViewHolderAtPosition(int position) {
        return (ActionListItem.ViewHolder) mPagedListView.getRecyclerView()
                .findViewHolderForAdapterPosition(position);
    }

    /**
     * Returns a {@link ViewAction} that calls {@link View#performClick()} on the child view
     * with the given id. Note this bypasses touch dispatch, so disabled-state handling is
     * not exercised by it.
     */
    private static ViewAction clickChildViewWithId(final int id) {
        return new ViewAction() {
            @Override
            public Matcher<View> getConstraints() {
                return null;
            }

            @Override
            public String getDescription() {
                return "Click on a child view with specific id.";
            }

            @Override
            public void perform(UiController uiController, View view) {
                View v = view.findViewById(id);
                v.performClick();
            }
        };
    }
}
package com.lmj.rpc.serialize; import com.lmj.rpc.exception.SerializationNotExistException; import static com.lmj.rpc.Constants.Serialization.HESSIAN; import static com.lmj.rpc.Constants.Serialization.JSON; import static com.lmj.rpc.Constants.Serialization.MODE; public class SerializationUtils { public static Serialization get(short extraInfo) { switch (extraInfo & MODE) { case HESSIAN: return HessianSerialization.Inner.serialization; case JSON: return JsonSerialization.Inner.serialization; default: throw new SerializationNotExistException(extraInfo); } } }
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.model.content; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; /** * @author Simon Thoresen Hult */ public class DispatchTuningTest { @Test public void requireThatAccessorWork() { DispatchTuning dispatch = new DispatchTuning.Builder() .setMaxHitsPerPartition(69) .setDispatchPolicy("round-robin") .setMinGroupCoverage(7.5) .setMinActiveDocsCoverage(12.5) .build(); assertEquals(69, dispatch.getMaxHitsPerPartition().intValue()); assertEquals(7.5, dispatch.getMinGroupCoverage().doubleValue(), 0.0); assertEquals(12.5, dispatch.getMinActiveDocsCoverage().doubleValue(), 0.0); assertTrue(DispatchTuning.DispatchPolicy.ROUNDROBIN == dispatch.getDispatchPolicy()); } @Test public void requireThatRandomDispatchWork() { DispatchTuning dispatch = new DispatchTuning.Builder() .setDispatchPolicy("random") .build(); assertTrue(DispatchTuning.DispatchPolicy.ADAPTIVE == dispatch.getDispatchPolicy()); assertNull(dispatch.getMinGroupCoverage()); assertNull(dispatch.getMinActiveDocsCoverage()); } @Test public void requireThatWeightedDispatchWork() { DispatchTuning dispatch = new DispatchTuning.Builder() .setDispatchPolicy("adaptive") .build(); assertTrue(DispatchTuning.DispatchPolicy.ADAPTIVE == dispatch.getDispatchPolicy()); assertNull(dispatch.getMinGroupCoverage()); assertNull(dispatch.getMinActiveDocsCoverage()); } @Test public void requireThatDefaultsAreNull() { DispatchTuning dispatch = new DispatchTuning.Builder().build(); assertNull(dispatch.getMaxHitsPerPartition()); assertNull(dispatch.getDispatchPolicy()); assertNull(dispatch.getMinActiveDocsCoverage()); assertNull(dispatch.getMinGroupCoverage()); } }
package com.yanyusong.divideritemdecoration;

import android.content.Context;
import android.graphics.Color;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;

import com.yanyusong.divideritemdecoration.y_recycleradapter.GeneralRecyclerViewHolder;
import com.yanyusong.divideritemdecoration.y_recycleradapter.Y_ItemEntityList;
import com.yanyusong.divideritemdecoration.y_recycleradapter.Y_MultiRecyclerAdapter;
import com.yanyusong.divideritemdecoration.y_recycleradapter.Y_OnBind;
import com.yanyusong.y_divideritemdecoration.Y_Divider;
import com.yanyusong.y_divideritemdecoration.Y_DividerBuilder;
import com.yanyusong.y_divideritemdecoration.Y_DividerItemDecoration;

import java.util.ArrayList;
import java.util.List;

/**
 * Demo activity showing Y_DividerItemDecoration with a vertical
 * LinearLayoutManager: 25 text rows, each position 0-6 getting a distinctly
 * configured divider to exercise the builder's options.
 *
 * Created by mac on 2017/4/6.
 */
public class LinearLayoutManagerActivity extends AppCompatActivity {

    private RecyclerView recyclerView;
    // Backing data for the multi-type adapter; rows are bound via Y_OnBind below.
    private Y_ItemEntityList itemEntityList = new Y_ItemEntityList();

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_recyclerview);
        recyclerView = (RecyclerView) findViewById(R.id.recyclerview);

        // 25 dummy rows "item0".."item24" — enough to scroll past every
        // special-cased divider position.
        List<String> items = new ArrayList<>();
        for (int i = 0; i < 25; i++) {
            items.add("item" + i);
        }
        itemEntityList.addItems(R.layout.item_recyclerview_text, items)
                .addOnBind(R.layout.item_recyclerview_text, new Y_OnBind() {
                    @Override
                    public void onBindChildViewData(GeneralRecyclerViewHolder holder, Object itemData, int position) {
                        holder.setText(R.id.textView, (String) itemData);
                    }
                });

        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        // Decoration must be added before/independently of the adapter; it is
        // consulted per item position during layout.
        recyclerView.addItemDecoration(new DividerItemDecoration(this));
        recyclerView.setAdapter(new Y_MultiRecyclerAdapter(this, itemEntityList));
    }

    /**
     * Per-position divider configuration: positions 0-6 each demonstrate a
     * different builder combination; everything else gets a plain grey
     * bottom line.
     */
    class DividerItemDecoration extends Y_DividerItemDecoration {

        public DividerItemDecoration(Context context) {
            super(context);
        }

        @Override
        public Y_Divider getDivider(int itemPosition) {
            Y_Divider divider = null;
            switch (itemPosition) {
                case 0:
                    divider = new Y_DividerBuilder()
                            .setBottomSideLine(true, 0xffFF4081, Color.GREEN, 6, 20, 20)
                            .create();
                    break;
                case 1:
                    // divider = new Y_DividerBuilder().setBottomSideLine(true, 0xff666666, 6, 0, 0).create();
                    // NOTE(review): body is commented out in the source, so
                    // position 1 returns null — confirm the library tolerates a
                    // null divider or whether this was meant to be enabled.
                    break;
                case 2:
                    divider = new Y_DividerBuilder()
                            .setBottomSideLine(true, 0xffFF4081, 4, 0, 0)
                            .create();
                    break;
                case 3:
                    divider = new Y_DividerBuilder()
                            .setBottomSideLine(true, 0xffFF4081, 6, 16, 16)
                            .create();
                    break;
                case 4:
                    divider = new Y_DividerBuilder()
                            .setBottomSideLine(true, 0xffFF4081, 6, 16, 32)
                            .create();
                    break;
                case 5:
                    divider = new Y_DividerBuilder()
                            .setBottomSideLine(true, 0xffFF4081, 6, 0, 0)
                            .create();
                    break;
                case 6:
                    // Lines on three sides at once: left, right, and bottom.
                    divider = new Y_DividerBuilder()
                            .setLeftSideLine(true, 0xffFF4081, 3, 0, 0)
                            .setRightSideLine(true, 0xffFF4081, 8, 0, 0)
                            .setBottomSideLine(true, 0xffFF4081, 6, 0, 0)
                            .create();
                    break;
                default:
                    // All remaining positions: thin grey bottom divider.
                    divider = new Y_DividerBuilder()
                            .setBottomSideLine(true, 0xff666666, 6, 0, 0)
                            .create();
                    break;
            }
            return divider;
        }
    }
}
import java.util.*;

/**
 * Console exercise covering loops, pre/post decrement, conditionals, and a
 * day-of-week switch. Reads all inputs from stdin via a single Scanner.
 */
public class Loops {

    /**
     * Computes n! iteratively.
     *
     * @param n the number whose factorial is wanted
     * @return n! as a long; 1 when n <= 0 (the loop body never runs).
     *         Overflows silently for n > 20.
     */
    static long factorial(int n) {
        long result = 1;
        for (int i = 1; i <= n; i++) {
            result *= i;
        }
        return result;
    }

    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);

        System.out.println("Let's compute the factorial of a number");
        int n = sc.nextInt();
        System.out.println(n + " factorial is equal to " + factorial(n));

        System.out.println("Let's see the difference between i-- and --i");
        int j = sc.nextInt();
        // In a for-statement's update clause the expression value is discarded,
        // so --i and i-- produce identical sequences — the two outputs match.
        StringBuilder minusJ = new StringBuilder();
        StringBuilder jMinus = new StringBuilder();
        for (int i = j; i > 0; --i) {
            minusJ.append(i).append(" ");
        }
        for (int i = j; i > 0; i--) {
            jMinus.append(i).append(" ");
        }
        System.out.println("This is what happens if the increment is before " + minusJ);
        System.out.println("This is what happens if the increment is after " + jMinus);

        System.out.println("Moving on, let's see what you can do");
        int age = sc.nextInt();
        boolean isVotingYear = sc.nextBoolean();  // primitive boolean: no pointless boxing
        boolean isTeatotaller = sc.nextBoolean();
        // NOTE(review): 'age > 18' excludes exactly-18-year-olds from voting;
        // confirm whether '>= 18' was intended before changing the condition.
        if (age > 18 && isVotingYear) {
            System.out.println("You can vote this year");
        } else if (age < 21 || isTeatotaller) {
            System.out.println("No drink for you!");
        } else {
            System.out.println("What do you want to drink boss?"); // was ':' — compile error
        }

        Calendar c = new GregorianCalendar(); // was 'new GregorianCalendar;' — compile error
        // Calendar.DAY_OF_WEEK runs SUNDAY(1)..SATURDAY(7), not 0..6 as the
        // original comment assumed; named constants fix the off-by-one mapping
        // (Mon-Thu long hours, Fri short hours, Sat/Sun closed).
        int day = c.get(Calendar.DAY_OF_WEEK); // was 'calendar.get(...)' on an undeclared name
        switch (day) {
            case Calendar.MONDAY:
            case Calendar.TUESDAY:
            case Calendar.WEDNESDAY:
            case Calendar.THURSDAY:
                System.out.println("Today's hours are 7am to 7pm"); // fixed '7pn' typo
                break;
            case Calendar.FRIDAY:
                System.out.println("Today's hours are 7am to 4:30pm");
                break;
            case Calendar.SATURDAY:
            case Calendar.SUNDAY:
                System.out.println("We are closed today.");
                break; // was missing — fell through into default
            default:
                System.out.println("Is that a new day?");
                break;
        }

        sc.close();
    }
}
/* * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.iacuc.authorizer; import org.kuali.coeus.sys.framework.auth.task.Task; import org.kuali.coeus.sys.framework.auth.task.TaskAuthorizerBase; import org.kuali.kra.infrastructure.Constants; import org.kuali.kra.infrastructure.PermissionConstants; public class CreateIacucResearchAreaAuthorizer extends TaskAuthorizerBase { @Override public boolean isAuthorized(String userId, Task task) { boolean retVal = hasUnitPermission(userId, Constants.MODULE_NAMESPACE_IACUC, PermissionConstants.MAINTAIN_IACUC_AREA_OF_RESEARCH); return retVal; } }