gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
// Generated from /Users/chanjinpark/GitHub/DesignEvolution/src/main/resources/java-grammar/Java.g4 by ANTLR 4.5
import java.util.HashMap;
import java.util.ArrayList;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
/**
 * A complete generic visitor for parse trees produced by {@link JavaParser}.
 *
 * <p>Auto-generated from the Java.g4 grammar by ANTLR; one visit method per
 * grammar rule. Each method receives the rule's parse-tree context and
 * returns the visitor result.
 *
 * @param <T> the return type of the visit operation; use {@link Void} for
 *            operations with no return type
 */
public interface JavaVisitor<T> extends ParseTreeVisitor<T> {
	/** Visit a parse tree produced by {@link JavaParser#compilationUnit}; returns the visitor result. */
	T visitCompilationUnit(@NotNull JavaParser.CompilationUnitContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#packageDeclaration}; returns the visitor result. */
	T visitPackageDeclaration(@NotNull JavaParser.PackageDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#importDeclaration}; returns the visitor result. */
	T visitImportDeclaration(@NotNull JavaParser.ImportDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#typeDeclaration}; returns the visitor result. */
	T visitTypeDeclaration(@NotNull JavaParser.TypeDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#modifier}; returns the visitor result. */
	T visitModifier(@NotNull JavaParser.ModifierContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#classOrInterfaceModifier}; returns the visitor result. */
	T visitClassOrInterfaceModifier(@NotNull JavaParser.ClassOrInterfaceModifierContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#variableModifier}; returns the visitor result. */
	T visitVariableModifier(@NotNull JavaParser.VariableModifierContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#classDeclaration}; returns the visitor result. */
	T visitClassDeclaration(@NotNull JavaParser.ClassDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#typeParameters}; returns the visitor result. */
	T visitTypeParameters(@NotNull JavaParser.TypeParametersContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#typeParameter}; returns the visitor result. */
	T visitTypeParameter(@NotNull JavaParser.TypeParameterContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#typeBound}; returns the visitor result. */
	T visitTypeBound(@NotNull JavaParser.TypeBoundContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#enumDeclaration}; returns the visitor result. */
	T visitEnumDeclaration(@NotNull JavaParser.EnumDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#enumConstants}; returns the visitor result. */
	T visitEnumConstants(@NotNull JavaParser.EnumConstantsContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#enumConstant}; returns the visitor result. */
	T visitEnumConstant(@NotNull JavaParser.EnumConstantContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#enumBodyDeclarations}; returns the visitor result. */
	T visitEnumBodyDeclarations(@NotNull JavaParser.EnumBodyDeclarationsContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#interfaceDeclaration}; returns the visitor result. */
	T visitInterfaceDeclaration(@NotNull JavaParser.InterfaceDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#typeList}; returns the visitor result. */
	T visitTypeList(@NotNull JavaParser.TypeListContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#classBody}; returns the visitor result. */
	T visitClassBody(@NotNull JavaParser.ClassBodyContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#interfaceBody}; returns the visitor result. */
	T visitInterfaceBody(@NotNull JavaParser.InterfaceBodyContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#classBodyDeclaration}; returns the visitor result. */
	T visitClassBodyDeclaration(@NotNull JavaParser.ClassBodyDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#memberDeclaration}; returns the visitor result. */
	T visitMemberDeclaration(@NotNull JavaParser.MemberDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#methodDeclaration}; returns the visitor result. */
	T visitMethodDeclaration(@NotNull JavaParser.MethodDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#genericMethodDeclaration}; returns the visitor result. */
	T visitGenericMethodDeclaration(@NotNull JavaParser.GenericMethodDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#constructorDeclaration}; returns the visitor result. */
	T visitConstructorDeclaration(@NotNull JavaParser.ConstructorDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#genericConstructorDeclaration}; returns the visitor result. */
	T visitGenericConstructorDeclaration(@NotNull JavaParser.GenericConstructorDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#fieldDeclaration}; returns the visitor result. */
	T visitFieldDeclaration(@NotNull JavaParser.FieldDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#interfaceBodyDeclaration}; returns the visitor result. */
	T visitInterfaceBodyDeclaration(@NotNull JavaParser.InterfaceBodyDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#interfaceMemberDeclaration}; returns the visitor result. */
	T visitInterfaceMemberDeclaration(@NotNull JavaParser.InterfaceMemberDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#constDeclaration}; returns the visitor result. */
	T visitConstDeclaration(@NotNull JavaParser.ConstDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#constantDeclarator}; returns the visitor result. */
	T visitConstantDeclarator(@NotNull JavaParser.ConstantDeclaratorContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#interfaceMethodDeclaration}; returns the visitor result. */
	T visitInterfaceMethodDeclaration(@NotNull JavaParser.InterfaceMethodDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#genericInterfaceMethodDeclaration}; returns the visitor result. */
	T visitGenericInterfaceMethodDeclaration(@NotNull JavaParser.GenericInterfaceMethodDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#variableDeclarators}; returns the visitor result. */
	T visitVariableDeclarators(@NotNull JavaParser.VariableDeclaratorsContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#variableDeclarator}; returns the visitor result. */
	T visitVariableDeclarator(@NotNull JavaParser.VariableDeclaratorContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#variableDeclaratorId}; returns the visitor result. */
	T visitVariableDeclaratorId(@NotNull JavaParser.VariableDeclaratorIdContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#variableInitializer}; returns the visitor result. */
	T visitVariableInitializer(@NotNull JavaParser.VariableInitializerContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#arrayInitializer}; returns the visitor result. */
	T visitArrayInitializer(@NotNull JavaParser.ArrayInitializerContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#enumConstantName}; returns the visitor result. */
	T visitEnumConstantName(@NotNull JavaParser.EnumConstantNameContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#type}; returns the visitor result. */
	T visitType(@NotNull JavaParser.TypeContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#classOrInterfaceType}; returns the visitor result. */
	T visitClassOrInterfaceType(@NotNull JavaParser.ClassOrInterfaceTypeContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#primitiveType}; returns the visitor result. */
	T visitPrimitiveType(@NotNull JavaParser.PrimitiveTypeContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#typeArguments}; returns the visitor result. */
	T visitTypeArguments(@NotNull JavaParser.TypeArgumentsContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#typeArgument}; returns the visitor result. */
	T visitTypeArgument(@NotNull JavaParser.TypeArgumentContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#qualifiedNameList}; returns the visitor result. */
	T visitQualifiedNameList(@NotNull JavaParser.QualifiedNameListContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#formalParameters}; returns the visitor result. */
	T visitFormalParameters(@NotNull JavaParser.FormalParametersContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#formalParameterList}; returns the visitor result. */
	T visitFormalParameterList(@NotNull JavaParser.FormalParameterListContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#formalParameter}; returns the visitor result. */
	T visitFormalParameter(@NotNull JavaParser.FormalParameterContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#lastFormalParameter}; returns the visitor result. */
	T visitLastFormalParameter(@NotNull JavaParser.LastFormalParameterContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#methodBody}; returns the visitor result. */
	T visitMethodBody(@NotNull JavaParser.MethodBodyContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#constructorBody}; returns the visitor result. */
	T visitConstructorBody(@NotNull JavaParser.ConstructorBodyContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#qualifiedName}; returns the visitor result. */
	T visitQualifiedName(@NotNull JavaParser.QualifiedNameContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#literal}; returns the visitor result. */
	T visitLiteral(@NotNull JavaParser.LiteralContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#annotation}; returns the visitor result. */
	T visitAnnotation(@NotNull JavaParser.AnnotationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#annotationName}; returns the visitor result. */
	T visitAnnotationName(@NotNull JavaParser.AnnotationNameContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#elementValuePairs}; returns the visitor result. */
	T visitElementValuePairs(@NotNull JavaParser.ElementValuePairsContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#elementValuePair}; returns the visitor result. */
	T visitElementValuePair(@NotNull JavaParser.ElementValuePairContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#elementValue}; returns the visitor result. */
	T visitElementValue(@NotNull JavaParser.ElementValueContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#elementValueArrayInitializer}; returns the visitor result. */
	T visitElementValueArrayInitializer(@NotNull JavaParser.ElementValueArrayInitializerContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#annotationTypeDeclaration}; returns the visitor result. */
	T visitAnnotationTypeDeclaration(@NotNull JavaParser.AnnotationTypeDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#annotationTypeBody}; returns the visitor result. */
	T visitAnnotationTypeBody(@NotNull JavaParser.AnnotationTypeBodyContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#annotationTypeElementDeclaration}; returns the visitor result. */
	T visitAnnotationTypeElementDeclaration(@NotNull JavaParser.AnnotationTypeElementDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#annotationTypeElementRest}; returns the visitor result. */
	T visitAnnotationTypeElementRest(@NotNull JavaParser.AnnotationTypeElementRestContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#annotationMethodOrConstantRest}; returns the visitor result. */
	T visitAnnotationMethodOrConstantRest(@NotNull JavaParser.AnnotationMethodOrConstantRestContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#annotationMethodRest}; returns the visitor result. */
	T visitAnnotationMethodRest(@NotNull JavaParser.AnnotationMethodRestContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#annotationConstantRest}; returns the visitor result. */
	T visitAnnotationConstantRest(@NotNull JavaParser.AnnotationConstantRestContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#defaultValue}; returns the visitor result. */
	T visitDefaultValue(@NotNull JavaParser.DefaultValueContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#block}; returns the visitor result. */
	T visitBlock(@NotNull JavaParser.BlockContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#blockStatement}; returns the visitor result. */
	T visitBlockStatement(@NotNull JavaParser.BlockStatementContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#localVariableDeclarationStatement}; returns the visitor result. */
	T visitLocalVariableDeclarationStatement(@NotNull JavaParser.LocalVariableDeclarationStatementContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#localVariableDeclaration}; returns the visitor result. */
	T visitLocalVariableDeclaration(@NotNull JavaParser.LocalVariableDeclarationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#statement}; returns the visitor result. */
	T visitStatement(@NotNull JavaParser.StatementContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#catchClause}; returns the visitor result. */
	T visitCatchClause(@NotNull JavaParser.CatchClauseContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#catchType}; returns the visitor result. */
	T visitCatchType(@NotNull JavaParser.CatchTypeContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#finallyBlock}; returns the visitor result. */
	T visitFinallyBlock(@NotNull JavaParser.FinallyBlockContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#resourceSpecification}; returns the visitor result. */
	T visitResourceSpecification(@NotNull JavaParser.ResourceSpecificationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#resources}; returns the visitor result. */
	T visitResources(@NotNull JavaParser.ResourcesContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#resource}; returns the visitor result. */
	T visitResource(@NotNull JavaParser.ResourceContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#switchBlockStatementGroup}; returns the visitor result. */
	T visitSwitchBlockStatementGroup(@NotNull JavaParser.SwitchBlockStatementGroupContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#switchLabel}; returns the visitor result. */
	T visitSwitchLabel(@NotNull JavaParser.SwitchLabelContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#forControl}; returns the visitor result. */
	T visitForControl(@NotNull JavaParser.ForControlContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#forInit}; returns the visitor result. */
	T visitForInit(@NotNull JavaParser.ForInitContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#enhancedForControl}; returns the visitor result. */
	T visitEnhancedForControl(@NotNull JavaParser.EnhancedForControlContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#forUpdate}; returns the visitor result. */
	T visitForUpdate(@NotNull JavaParser.ForUpdateContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#parExpression}; returns the visitor result. */
	T visitParExpression(@NotNull JavaParser.ParExpressionContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#expressionList}; returns the visitor result. */
	T visitExpressionList(@NotNull JavaParser.ExpressionListContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#statementExpression}; returns the visitor result. */
	T visitStatementExpression(@NotNull JavaParser.StatementExpressionContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#constantExpression}; returns the visitor result. */
	T visitConstantExpression(@NotNull JavaParser.ConstantExpressionContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#expression}; returns the visitor result. */
	T visitExpression(@NotNull JavaParser.ExpressionContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#primary}; returns the visitor result. */
	T visitPrimary(@NotNull JavaParser.PrimaryContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#creator}; returns the visitor result. */
	T visitCreator(@NotNull JavaParser.CreatorContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#createdName}; returns the visitor result. */
	T visitCreatedName(@NotNull JavaParser.CreatedNameContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#innerCreator}; returns the visitor result. */
	T visitInnerCreator(@NotNull JavaParser.InnerCreatorContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#arrayCreatorRest}; returns the visitor result. */
	T visitArrayCreatorRest(@NotNull JavaParser.ArrayCreatorRestContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#classCreatorRest}; returns the visitor result. */
	T visitClassCreatorRest(@NotNull JavaParser.ClassCreatorRestContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#explicitGenericInvocation}; returns the visitor result. */
	T visitExplicitGenericInvocation(@NotNull JavaParser.ExplicitGenericInvocationContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#nonWildcardTypeArguments}; returns the visitor result. */
	T visitNonWildcardTypeArguments(@NotNull JavaParser.NonWildcardTypeArgumentsContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#typeArgumentsOrDiamond}; returns the visitor result. */
	T visitTypeArgumentsOrDiamond(@NotNull JavaParser.TypeArgumentsOrDiamondContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#nonWildcardTypeArgumentsOrDiamond}; returns the visitor result. */
	T visitNonWildcardTypeArgumentsOrDiamond(@NotNull JavaParser.NonWildcardTypeArgumentsOrDiamondContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#superSuffix}; returns the visitor result. */
	T visitSuperSuffix(@NotNull JavaParser.SuperSuffixContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#explicitGenericInvocationSuffix}; returns the visitor result. */
	T visitExplicitGenericInvocationSuffix(@NotNull JavaParser.ExplicitGenericInvocationSuffixContext ctx);
	/** Visit a parse tree produced by {@link JavaParser#arguments}; returns the visitor result. */
	T visitArguments(@NotNull JavaParser.ArgumentsContext ctx);
}
| |
package schevo.server.api;
import org.apache.log4j.Logger;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import schevo.UriConfigs;
import schevo.common.Utils;
import schevo.server.SpaceException;
import schevo.server.space.Repository;
import schevo.server.space.RepositoryVersion;
import schevo.server.space.SpacesFsLocal;
import schevo.server.space.Workspace;
/**
* Rest controller for managed spaces i.e. create or get workspace, repository,
* version of repository
*
* @author tomecode.com
*
*/
@RestController
public class SpacesControllerV1 {

	private static final Logger log = Logger.getLogger(SpacesControllerV1.class);
	private static final SpacesFsLocal spacesFs = SpacesFsLocal.get();

	/**
	 * Lists the names of all workspaces.
	 *
	 * @return 200 OK with a {@link WorkspacesDto} holding the workspace names
	 */
	@ResponseBody
	@GetMapping(UriConfigs.WORKSPACES_URI)
	public final ResponseEntity<?> getWorkspaces() {
		return new ResponseEntity<>(new WorkspacesDto(spacesFs.getWorkspaceNames()), HttpStatus.OK);
	}

	/**
	 * Gets info about a single workspace.
	 * <p>
	 * NOTE(review): the method name "getWorkpace" contains a typo; it is kept as-is
	 * because the HTTP contract is defined by the URI mapping, not the method name,
	 * and other code may reference the handler by name.
	 *
	 * @param workspaceName name of the workspace to look up
	 * @return 200 OK with the workspace, or 404 if it does not exist
	 */
	@ResponseBody
	@GetMapping(UriConfigs.WORKSPACE_URI)
	public final ResponseEntity<?> getWorkpace(@PathVariable("workspaceName") String workspaceName) {
		log.info("GET /{workspaceName} - @workspaceName=" + workspaceName);
		Workspace workspace = spacesFs.getWorkspace(workspaceName);
		if (workspace == null) {
			// fixed typo "workpsace" in the log message
			log.error("GET /{workspaceName} - @workspace=" + workspaceName + " not found workspace");
			return new ResponseEntity<>(SpaceError.workspaceNotFound(workspaceName), HttpStatus.NOT_FOUND);
		}
		return new ResponseEntity<>(workspace, HttpStatus.OK);
	}

	/**
	 * Creates a new workspace.
	 *
	 * @param workspaceName name of the workspace to create
	 * @return 200 OK with the created workspace, or 400 if creation fails
	 *         (e.g. the workspace already exists)
	 */
	@ResponseBody
	@PostMapping(UriConfigs.WORKSPACE_URI)
	public final ResponseEntity<?> newWorkspace(@PathVariable("workspaceName") String workspaceName) {
		log.info("POST /{workspaceName} - @workspaceName=" + workspaceName);
		try {
			Workspace workspace = spacesFs.newWorkspace(workspaceName);
			log.info("POST /{workspaceName} - @workspace=" + workspaceName + " created!");
			return new ResponseEntity<>(workspace, HttpStatus.OK);
		} catch (SpaceException e) {
			log.error("POST /{workspaceName} - @workspace=" + workspaceName + " failed to create, reason: " + e.getMessage(), e);
			// diamond operator for consistency with the other handlers
			return new ResponseEntity<>(SpaceError.workspaceExists(workspaceName), HttpStatus.BAD_REQUEST);
		}
	}

	/**
	 * Gets info about a repository inside a workspace.
	 *
	 * @param workspaceName  name of the parent workspace
	 * @param repositoryName name of the repository
	 * @return 200 OK with the repository, or 404 if the workspace or the
	 *         repository does not exist
	 */
	@ResponseBody
	@GetMapping(UriConfigs.REPOSITORY_URI)
	public final ResponseEntity<?> getRepository(@PathVariable("workspaceName") String workspaceName, @PathVariable("repositoryName") String repositoryName) {
		// include the repository name in the request log (was missing)
		log.info("GET /{workspaceName}/{repositoryName} - @workspaceName=" + workspaceName + " @repositoryName=" + repositoryName);
		Workspace workspace = spacesFs.getWorkspace(workspaceName);
		if (workspace == null) {
			log.error("GET /{workspaceName}/{repositoryName} - @workspace=" + workspaceName + " not found workspace");
			return new ResponseEntity<>(SpaceError.workspaceNotFound(workspaceName), HttpStatus.NOT_FOUND);
		}
		// get repository
		Repository repository = workspace.getRepository(repositoryName);
		if (repository == null) {
			log.error("GET /{workspaceName}/{repositoryName} - @repository=" + repositoryName + " not found in workspace");
			return new ResponseEntity<>(SpaceError.repositryNotFound(workspaceName, repositoryName), HttpStatus.NOT_FOUND);
		}
		return new ResponseEntity<>(repository, HttpStatus.OK);
	}

	/**
	 * Creates a new repository inside a workspace.
	 *
	 * @param workspaceName  name of the parent workspace
	 * @param repositoryName name of the repository to create
	 * @return 200 OK with the created repository, 404 if the workspace does not
	 *         exist, or 400 if creation fails (e.g. the repository already exists)
	 */
	@ResponseBody
	@PostMapping(UriConfigs.REPOSITORY_URI)
	public final ResponseEntity<?> newRepository(@PathVariable("workspaceName") String workspaceName, @PathVariable("repositoryName") String repositoryName) {
		log.info("POST /{workspaceName}/{repositoryName} - @workspaceName=" + workspaceName + " @repositoryName=" + repositoryName);
		Workspace workspace = spacesFs.getWorkspace(workspaceName);
		if (workspace == null) {
			// fixed typo "workpsace" in the log message
			log.error("POST /{workspaceName}/{repositoryName} - @workspace=" + workspaceName + " not found workspace");
			return new ResponseEntity<>(SpaceError.workspaceNotFound(workspaceName), HttpStatus.NOT_FOUND);
		}
		try {
			// create new repository
			Repository repository = workspace.newRepository(repositoryName);
			log.info("POST /{workspaceName}/{repositoryName} - @workspaceName=" + workspaceName + " @repositoryName=" + repositoryName + " created!");
			return new ResponseEntity<>(repository, HttpStatus.OK);
		} catch (SpaceException e) {
			log.error("POST /{workspaceName}/{repositoryName} - @workspaceName=" + workspaceName + " @repositoryName=" + repositoryName + " failed to create: reason: " + e.getMessage(), e);
			return new ResponseEntity<>(SpaceError.repositryExists(workspaceName, repositoryName), HttpStatus.BAD_REQUEST);
		}
	}

	/**
	 * Creates a new version of a repository.
	 *
	 * @param workspaceName         name of the parent workspace
	 * @param repositoryName        name of the parent repository
	 * @param repositoryVersionName name of the version to create
	 * @return 200 OK with the created version, 400 if the version name is blank
	 *         or creation fails, or 404 if the workspace/repository is missing
	 */
	@ResponseBody
	@PostMapping(UriConfigs.REPOSITORY_VERSION_URI)
	public final ResponseEntity<?> newRepositoryVersion(@PathVariable("workspaceName") String workspaceName, @PathVariable("repositoryName") String repositoryName, @PathVariable("repositoryVersionName") String repositoryVersionName) {
		// log path now matches the actual mapping (was missing {repositoryVersionName})
		log.info("POST /{workspaceName}/{repositoryName}/{repositoryVersionName} - @workspaceName=" + workspaceName + " @repositoryName=" + repositoryName + " @repositoryVersionName=" + repositoryVersionName);
		repositoryVersionName = Utils.strOrNull(repositoryVersionName);
		if (repositoryVersionName == null) {
			// redundant new String(...) wrapper removed; response body unchanged
			return new ResponseEntity<>("Repository version name is empty", HttpStatus.BAD_REQUEST);
		}
		Workspace workspace = spacesFs.getWorkspace(workspaceName);
		if (workspace == null) {
			log.error("POST /{workspaceName}/{repositoryName}/{repositoryVersionName} - @workspace=" + workspaceName + " not found workspace");
			return new ResponseEntity<>(SpaceError.workspaceNotFound(workspaceName), HttpStatus.NOT_FOUND);
		}
		Repository repository = workspace.getRepository(repositoryName);
		if (repository == null) {
			log.error("POST /{workspaceName}/{repositoryName}/{repositoryVersionName} - @repository=" + repositoryName + " not found in workspace");
			return new ResponseEntity<>(SpaceError.repositryNotFound(workspaceName, repositoryName), HttpStatus.NOT_FOUND);
		}
		try {
			// create new version of repository
			RepositoryVersion repositoryVersion = repository.newVersion(repositoryVersionName);
			log.info("POST /{workspaceName}/{repositoryName}/{repositoryVersionName} - @workspaceName=" + workspaceName + " @repositoryName=" + repositoryName + " @repositoryVersionName=" + repositoryVersionName + " created!");
			return new ResponseEntity<>(repositoryVersion, HttpStatus.OK);
		} catch (SpaceException e) {
			log.error("POST /{workspaceName}/{repositoryName}/{repositoryVersionName} - @workspaceName=" + workspaceName + " @repositoryName=" + repositoryName + " @repositoryVersionName=" + repositoryVersionName + " failed to create: reason: " + e.getMessage(), e);
			return new ResponseEntity<>(SpaceError.repositryVersionExists(workspaceName, repositoryName, repositoryVersionName), HttpStatus.BAD_REQUEST);
		}
	}

	/**
	 * Gets details about a specific repository version.
	 *
	 * @param workspaceName         name of the parent workspace
	 * @param repositoryName        name of the parent repository
	 * @param repositoryVersionName name of the version to look up
	 * @return 200 OK with the version, or 404 if the workspace, repository or
	 *         version does not exist
	 */
	@ResponseBody
	@GetMapping(UriConfigs.REPOSITORY_VERSION_URI)
	public final ResponseEntity<?> getRepositoryVersion(@PathVariable("workspaceName") String workspaceName, @PathVariable("repositoryName") String repositoryName, @PathVariable("repositoryVersionName") String repositoryVersionName) {
		// log all three path variables (only workspaceName was logged before)
		log.info("GET /{workspaceName}/{repositoryName}/{repositoryVersionName} - @workspaceName=" + workspaceName + " @repositoryName=" + repositoryName + " @repositoryVersionName=" + repositoryVersionName);
		// find workspace
		Workspace workspace = spacesFs.getWorkspace(workspaceName);
		if (workspace == null) {
			return new ResponseEntity<>(SpaceError.workspaceNotFound(workspaceName), HttpStatus.NOT_FOUND);
		}
		// find repository
		Repository repository = workspace.getRepository(repositoryName);
		if (repository == null) {
			return new ResponseEntity<>(SpaceError.repositryNotFound(workspaceName, repositoryName), HttpStatus.NOT_FOUND);
		}
		// find version of repository
		RepositoryVersion repositoryVersion = repository.getVersion(repositoryVersionName);
		if (repositoryVersion == null) {
			return new ResponseEntity<>(SpaceError.repositryVersionNotFound(workspaceName, repositoryName, repositoryVersionName), HttpStatus.NOT_FOUND);
		}
		return new ResponseEntity<>(repositoryVersion, HttpStatus.OK);
	}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* SamSession.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202111;
/**
 * Value object describing a SAM (server-side ad insertion) session.
 * NOTE(review): this file is auto-generated by Apache Axis WSDL2Java (see the
 * file header); prefer regenerating from the WSDL over hand-editing.
 */
public class SamSession implements java.io.Serializable {
// identifier of the session — presumably unique per session; confirm against the WSDL
private java.lang.String sessionId;
// whether this is a VOD (video-on-demand) session — inferred from the name; TODO confirm
private java.lang.Boolean isVodSession;
// request used to create the underlying stream
private com.google.api.ads.admanager.axis.v202111.StreamCreateRequest streamCreateRequest;
// ad breaks associated with the session
private com.google.api.ads.admanager.axis.v202111.AdBreak[] adBreaks;
// when the session started
private com.google.api.ads.admanager.axis.v202111.DateTime startDateTime;
// durations in milliseconds — units inferred from the field names; TODO confirm
private java.lang.Long sessionDurationMillis;
private java.lang.Long contentDurationMillis;
/** No-arg constructor required for Axis (de)serialization; all fields start null. */
public SamSession() {
}
/**
 * All-fields constructor; simply stores each argument without validation
 * or defensive copying (generated Axis style — adBreaks is kept by reference).
 */
public SamSession(
java.lang.String sessionId,
java.lang.Boolean isVodSession,
com.google.api.ads.admanager.axis.v202111.StreamCreateRequest streamCreateRequest,
com.google.api.ads.admanager.axis.v202111.AdBreak[] adBreaks,
com.google.api.ads.admanager.axis.v202111.DateTime startDateTime,
java.lang.Long sessionDurationMillis,
java.lang.Long contentDurationMillis) {
this.sessionId = sessionId;
this.isVodSession = isVodSession;
this.streamCreateRequest = streamCreateRequest;
this.adBreaks = adBreaks;
this.startDateTime = startDateTime;
this.sessionDurationMillis = sessionDurationMillis;
this.contentDurationMillis = contentDurationMillis;
}
@Override
public String toString() {
return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
.omitNullValues()
.add("adBreaks", getAdBreaks())
.add("contentDurationMillis", getContentDurationMillis())
.add("isVodSession", getIsVodSession())
.add("sessionDurationMillis", getSessionDurationMillis())
.add("sessionId", getSessionId())
.add("startDateTime", getStartDateTime())
.add("streamCreateRequest", getStreamCreateRequest())
.toString();
}
/**
* Gets the sessionId value for this SamSession.
*
* @return sessionId
*/
public java.lang.String getSessionId() {
return sessionId;
}
/**
* Sets the sessionId value for this SamSession.
*
* @param sessionId
*/
public void setSessionId(java.lang.String sessionId) {
this.sessionId = sessionId;
}
/**
* Gets the isVodSession value for this SamSession.
*
* @return isVodSession
*/
public java.lang.Boolean getIsVodSession() {
return isVodSession;
}
/**
* Sets the isVodSession value for this SamSession.
*
* @param isVodSession
*/
public void setIsVodSession(java.lang.Boolean isVodSession) {
this.isVodSession = isVodSession;
}
/**
* Gets the streamCreateRequest value for this SamSession.
*
* @return streamCreateRequest
*/
public com.google.api.ads.admanager.axis.v202111.StreamCreateRequest getStreamCreateRequest() {
return streamCreateRequest;
}
/**
* Sets the streamCreateRequest value for this SamSession.
*
* @param streamCreateRequest
*/
public void setStreamCreateRequest(com.google.api.ads.admanager.axis.v202111.StreamCreateRequest streamCreateRequest) {
this.streamCreateRequest = streamCreateRequest;
}
/**
* Gets the adBreaks value for this SamSession.
*
* @return adBreaks
*/
public com.google.api.ads.admanager.axis.v202111.AdBreak[] getAdBreaks() {
return adBreaks;
}
/**
* Sets the adBreaks value for this SamSession.
*
* @param adBreaks
*/
public void setAdBreaks(com.google.api.ads.admanager.axis.v202111.AdBreak[] adBreaks) {
this.adBreaks = adBreaks;
}
public com.google.api.ads.admanager.axis.v202111.AdBreak getAdBreaks(int i) {
return this.adBreaks[i];
}
public void setAdBreaks(int i, com.google.api.ads.admanager.axis.v202111.AdBreak _value) {
this.adBreaks[i] = _value;
}
/**
* Gets the startDateTime value for this SamSession.
*
* @return startDateTime
*/
public com.google.api.ads.admanager.axis.v202111.DateTime getStartDateTime() {
return startDateTime;
}
/**
* Sets the startDateTime value for this SamSession.
*
* @param startDateTime
*/
public void setStartDateTime(com.google.api.ads.admanager.axis.v202111.DateTime startDateTime) {
this.startDateTime = startDateTime;
}
/**
* Gets the sessionDurationMillis value for this SamSession.
*
* @return sessionDurationMillis
*/
public java.lang.Long getSessionDurationMillis() {
return sessionDurationMillis;
}
/**
* Sets the sessionDurationMillis value for this SamSession.
*
* @param sessionDurationMillis
*/
public void setSessionDurationMillis(java.lang.Long sessionDurationMillis) {
this.sessionDurationMillis = sessionDurationMillis;
}
/**
* Gets the contentDurationMillis value for this SamSession.
*
* @return contentDurationMillis
*/
public java.lang.Long getContentDurationMillis() {
return contentDurationMillis;
}
/**
* Sets the contentDurationMillis value for this SamSession.
*
* @param contentDurationMillis
*/
public void setContentDurationMillis(java.lang.Long contentDurationMillis) {
this.contentDurationMillis = contentDurationMillis;
}
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (!(obj instanceof SamSession)) return false;
SamSession other = (SamSession) obj;
if (obj == null) return false;
if (this == obj) return true;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = true &&
((this.sessionId==null && other.getSessionId()==null) ||
(this.sessionId!=null &&
this.sessionId.equals(other.getSessionId()))) &&
((this.isVodSession==null && other.getIsVodSession()==null) ||
(this.isVodSession!=null &&
this.isVodSession.equals(other.getIsVodSession()))) &&
((this.streamCreateRequest==null && other.getStreamCreateRequest()==null) ||
(this.streamCreateRequest!=null &&
this.streamCreateRequest.equals(other.getStreamCreateRequest()))) &&
((this.adBreaks==null && other.getAdBreaks()==null) ||
(this.adBreaks!=null &&
java.util.Arrays.equals(this.adBreaks, other.getAdBreaks()))) &&
((this.startDateTime==null && other.getStartDateTime()==null) ||
(this.startDateTime!=null &&
this.startDateTime.equals(other.getStartDateTime()))) &&
((this.sessionDurationMillis==null && other.getSessionDurationMillis()==null) ||
(this.sessionDurationMillis!=null &&
this.sessionDurationMillis.equals(other.getSessionDurationMillis()))) &&
((this.contentDurationMillis==null && other.getContentDurationMillis()==null) ||
(this.contentDurationMillis!=null &&
this.contentDurationMillis.equals(other.getContentDurationMillis())));
__equalsCalc = null;
return _equals;
}
private boolean __hashCodeCalc = false;
public synchronized int hashCode() {
if (__hashCodeCalc) {
return 0;
}
__hashCodeCalc = true;
int _hashCode = 1;
if (getSessionId() != null) {
_hashCode += getSessionId().hashCode();
}
if (getIsVodSession() != null) {
_hashCode += getIsVodSession().hashCode();
}
if (getStreamCreateRequest() != null) {
_hashCode += getStreamCreateRequest().hashCode();
}
if (getAdBreaks() != null) {
for (int i=0;
i<java.lang.reflect.Array.getLength(getAdBreaks());
i++) {
java.lang.Object obj = java.lang.reflect.Array.get(getAdBreaks(), i);
if (obj != null &&
!obj.getClass().isArray()) {
_hashCode += obj.hashCode();
}
}
}
if (getStartDateTime() != null) {
_hashCode += getStartDateTime().hashCode();
}
if (getSessionDurationMillis() != null) {
_hashCode += getSessionDurationMillis().hashCode();
}
if (getContentDurationMillis() != null) {
_hashCode += getContentDurationMillis().hashCode();
}
__hashCodeCalc = false;
return _hashCode;
}
// Type metadata
private static org.apache.axis.description.TypeDesc typeDesc =
new org.apache.axis.description.TypeDesc(SamSession.class, true);
static {
typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "SamSession"));
org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("sessionId");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "sessionId"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("isVodSession");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "isVodSession"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "boolean"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("streamCreateRequest");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "streamCreateRequest"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "StreamCreateRequest"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("adBreaks");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "adBreaks"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "AdBreak"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
elemField.setMaxOccursUnbounded(true);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("startDateTime");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "startDateTime"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "DateTime"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("sessionDurationMillis");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "sessionDurationMillis"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("contentDurationMillis");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "contentDurationMillis"));
elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
}
/**
* Return type metadata object
*/
public static org.apache.axis.description.TypeDesc getTypeDesc() {
return typeDesc;
}
/**
* Get Custom Serializer
*/
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanSerializer(
_javaType, _xmlType, typeDesc);
}
/**
* Get Custom Deserializer
*/
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanDeserializer(
_javaType, _xmlType, typeDesc);
}
}
| |
/*
* Copyright 2014 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.engine.subsystem.headless.renderer;
import com.google.common.collect.Lists;
import org.terasology.config.Config;
import org.terasology.logic.players.LocalPlayer;
import org.terasology.logic.players.LocalPlayerSystem;
import org.terasology.math.Region3i;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector3i;
import org.terasology.monitoring.PerformanceMonitor;
import org.terasology.registry.CoreRegistry;
import org.terasology.rendering.cameras.Camera;
import org.terasology.rendering.world.ViewDistance;
import org.terasology.rendering.world.WorldRenderer;
import org.terasology.world.WorldProvider;
import org.terasology.world.chunks.ChunkConstants;
import org.terasology.world.chunks.ChunkProvider;
import org.terasology.world.chunks.RenderableChunk;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
 * A {@link WorldRenderer} for headless (server / no-graphics) runs: it keeps
 * the set of chunks around the camera up to date but renders nothing and
 * reports neutral values for all lighting/metrics queries.
 */
public class HeadlessWorldRenderer implements WorldRenderer {

    // Upper bound on tracked chunks: the volume of the largest view distance.
    private static final int MAX_CHUNKS = ViewDistance.MEGA.getChunkDistance().x * ViewDistance.MEGA.getChunkDistance().y * ViewDistance.MEGA.getChunkDistance().z;

    private WorldProvider worldProvider;
    private ChunkProvider chunkProvider;

    // Stand-in camera handed to the player system; never renders.
    private Camera noCamera = new NullCamera();

    /* CHUNKS */
    // True when some chunk in the view region was not yet available, so the
    // next update() retries even if the camera has not moved.
    private boolean pendingChunks;
    private final List<RenderableChunk> chunksInProximity = Lists.newArrayListWithCapacity(MAX_CHUNKS);
    // Chunk coordinates the proximity list was last built around.
    private Vector3i chunkPos = new Vector3i();

    private Config config;

    public HeadlessWorldRenderer(WorldProvider worldProvider, ChunkProvider chunkProvider, LocalPlayerSystem localPlayerSystem) {
        this.worldProvider = worldProvider;
        this.chunkProvider = chunkProvider;
        localPlayerSystem.setPlayerCamera(noCamera);
        config = CoreRegistry.get(Config.class);
    }

    @Override
    public void onChunkLoaded(Vector3i pos) {
    }

    @Override
    public void onChunkUnloaded(Vector3i pos) {
    }

    @Override
    public Camera getActiveCamera() {
        return noCamera;
    }

    @Override
    public Camera getLightCamera() {
        return noCamera;
    }

    @Override
    public ChunkProvider getChunkProvider() {
        return chunkProvider;
    }

    @Override
    public WorldProvider getWorldProvider() {
        return worldProvider;
    }

    @Override
    public void setPlayer(LocalPlayer localPlayer) {
        // TODO Auto-generated method stub
    }

    @Override
    public void update(float delta) {
        worldProvider.processPropagation();

        // Free unused space
        PerformanceMonitor.startActivity("Update Chunk Cache");
        chunkProvider.completeUpdate();
        chunkProvider.beginUpdate();
        PerformanceMonitor.endActivity();

        PerformanceMonitor.startActivity("Update Close Chunks");
        updateChunksInProximity(false);
        PerformanceMonitor.endActivity();
    }

    @Override
    public void render(WorldRenderingStage mono) {
        // TODO Auto-generated method stub
    }

    @Override
    public void dispose() {
        worldProvider.dispose();
    }

    @Override
    public boolean pregenerateChunks() {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public void changeViewDistance(ViewDistance viewDistance) {
        // TODO Auto-generated method stub
    }

    @Override
    public float getSunlightValue() {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public float getBlockLightValue() {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public float getRenderingLightValueAt(Vector3f vector3f) {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public float getSunlightValueAt(Vector3f worldPos) {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public float getBlockLightValueAt(Vector3f worldPos) {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public float getSmoothedPlayerSunlightValue() {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public boolean isHeadUnderWater() {
        // TODO Auto-generated method stub
        return false;
    }

    @Override
    public Vector3f getTint() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public float getTick() {
        // TODO Auto-generated method stub
        return 0;
    }

    @Override
    public WorldRenderingStage getCurrentRenderStage() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public String getMetrics() {
        return "";
    }

    /**
     * Updates the list of chunks around the player.
     *
     * @param force Forces the update
     * @return True if the list was changed
     */
    public boolean updateChunksInProximity(boolean force) {
        Vector3i newChunkPos = calcCamChunkOffset();

        // TODO: This should actually be done based on events from the ChunkProvider on new chunk availability/old chunk removal
        boolean chunksCurrentlyPending = false;
        if (!newChunkPos.equals(chunkPos) || force || pendingChunks) {
            Vector3i viewingDistance = config.getRendering().getViewDistance().getChunkDistance();
            Region3i viewRegion = Region3i.createFromCenterExtents(newChunkPos, new Vector3i(viewingDistance.x / 2, viewingDistance.y / 2, viewingDistance.z / 2));
            if (chunksInProximity.size() == 0 || force || pendingChunks) {
                // just add all visible chunks
                chunksInProximity.clear();
                for (Vector3i chunkPosition : viewRegion) {
                    RenderableChunk c = chunkProvider.getChunk(chunkPosition);
                    if (c != null && worldProvider.getLocalView(c.getPosition()) != null) {
                        chunksInProximity.add(c);
                    } else {
                        chunksCurrentlyPending = true;
                    }
                }
            } else {
                Region3i oldRegion = Region3i.createFromCenterExtents(chunkPos, new Vector3i(viewingDistance.x / 2, viewingDistance.y / 2, viewingDistance.z / 2));
                Iterator<Vector3i> chunksForRemove = oldRegion.subtract(viewRegion);
                // remove chunks that fell out of the view region
                while (chunksForRemove.hasNext()) {
                    Vector3i r = chunksForRemove.next();
                    RenderableChunk c = chunkProvider.getChunk(r);
                    if (c != null) {
                        chunksInProximity.remove(c);
                        c.disposeMesh();
                    }
                }
                // add newly visible chunks
                for (Vector3i chunkPosition : viewRegion) {
                    RenderableChunk c = chunkProvider.getChunk(chunkPosition);
                    if (c != null && worldProvider.getLocalView(c.getPosition()) != null) {
                        // Fix: the old and new view regions overlap, so an
                        // unconditional add() accumulated duplicate entries
                        // on every camera move. Only add chunks not tracked yet.
                        if (!chunksInProximity.contains(c)) {
                            chunksInProximity.add(c);
                        }
                    } else {
                        chunksCurrentlyPending = true;
                    }
                }
            }

            chunkPos.set(newChunkPos);
            pendingChunks = chunksCurrentlyPending;

            Collections.sort(chunksInProximity, new ChunkFrontToBackComparator());
            return true;
        }
        return false;
    }

    /**
     * Chunk position of the player.
     *
     * @return The camera position expressed in chunk coordinates
     */
    private Vector3i calcCamChunkOffset() {
        return new Vector3i((int) (getActiveCamera().getPosition().x / ChunkConstants.SIZE_X),
                (int) (getActiveCamera().getPosition().y / ChunkConstants.SIZE_Y),
                (int) (getActiveCamera().getPosition().z / ChunkConstants.SIZE_Z));
    }

    /** Horizontal (x/z) distance from the chunk's centre column to the active camera. */
    private static float distanceToCamera(RenderableChunk chunk) {
        Vector3f result = new Vector3f((chunk.getPosition().x + 0.5f) * ChunkConstants.SIZE_X, 0, (chunk.getPosition().z + 0.5f) * ChunkConstants.SIZE_Z);

        Vector3f cameraPos = CoreRegistry.get(WorldRenderer.class).getActiveCamera().getPosition();
        result.x -= cameraPos.x;
        result.z -= cameraPos.z;

        return result.length();
    }

    /** Orders chunks nearest-to-camera first; nulls sort before non-nulls. */
    private static class ChunkFrontToBackComparator implements Comparator<RenderableChunk> {

        @Override
        public int compare(RenderableChunk o1, RenderableChunk o2) {
            // Fix: handle nulls BEFORE computing distances — the original
            // dereferenced both chunks first, so its null branches were
            // unreachable and a null entry caused an NPE. Also compare via
            // Double.compare instead of == on floating-point values.
            if (o1 == null && o2 == null) {
                return 0;
            }
            if (o1 == null) {
                return -1;
            }
            if (o2 == null) {
                return 1;
            }
            return Double.compare(distanceToCamera(o1), distanceToCamera(o2));
        }
    }
}
| |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.model;
import org.hibernate.annotations.ForeignKey;
import org.hibernate.annotations.Index;
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
/**
* Represents a product milestone. A single product version, for example "1.0", can be associated with several product
* milestones such as "1.0.0.build1", "1.0.0.build2", etc. A milestone represents the set of work (build records) that was
* performed during a development cycle from the previous milestone until the end of the current milestone.
*/
@Entity
@Table(uniqueConstraints = {
@UniqueConstraint(columnNames = {"version", "productVersion_id"})
})
public class ProductMilestone implements GenericEntity<Integer> {

    private static final long serialVersionUID = 6314079319551264379L;

    // Database sequence backing the generated primary key.
    public static final String SEQUENCE_NAME = "product_milestone_id_seq";

    @Id
    @SequenceGenerator(name = SEQUENCE_NAME, sequenceName = SEQUENCE_NAME, allocationSize = 1)
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = SEQUENCE_NAME)
    private Integer id;

    /**
     * Contains the milestone version string. This consists of a major, minor, and micro
     * numeric version followed by an alphanumeric qualifier. For example "1.0.0.ER1".
     */
    @Pattern(message="The version should consist of three numeric parts and one alphanumeric qualifier each separated by a dot" , regexp="^[0-9]+\\.[0-9]+\\.[0-9]+\\.[\\w]+$")
    @NotNull
    @Size(max=255)
    private String version;

    /**
     * The release (or handoff) date of this milestone
     */
    private Date endDate;

    /**
     * The scheduled starting date of this milestone
     */
    private Date startingDate;

    /**
     * The scheduled ending date of this milestone
     */
    private Date plannedEndDate;

    /**
     * URL which can be used to download the product distribution
     */
    @Size(max=255)
    private String downloadUrl;

    /**
     * Issue tracker URL containing the set of issues fixed in this milestone
     */
    @Size(max=255)
    private String issueTrackerUrl;

    /**
     * The product major.minor version associated with this milestone. After
     * initial creation of the milestone, the product version should never change.
     */
    @NotNull
    @ManyToOne(cascade = { CascadeType.REFRESH })
    @ForeignKey(name = "fk_productmilestone_productversion")
    @Index(name="idx_productmilestone_productversion")
    @JoinColumn(updatable = false)
    private ProductVersion productVersion;

    // Inverse side of the optional one-to-one with ProductRelease; set only
    // once the milestone has been promoted to a release.
    @OneToOne(mappedBy = "productMilestone")
    private ProductRelease productRelease;

    /**
     * The builds which were executed/performed during this milestone build cycle. This includes
     * failed builds and builds which produced artifacts which were later replaced by subsequent
     * builds. The intent of this field is to track total effort of a milestone, so for example,
     * failed builds consumed machine and human resources even though they were not delivered with
     * the product distribution.
     */
    @OneToMany(mappedBy = "productMilestone", fetch = FetchType.EAGER)
    private Set<BuildRecord> performedBuilds;

    /**
     * Set of artifacts which were distributed in this product milestone. At a minimum, this includes
     * the runtime artifacts of a product. Some additional artifacts could be included if they
     * are supported and could include some
     *
     * The BuildRecordSets associated with a milestone should be created when the milestone
     * is first created, and never updated after that.
     */
    @ManyToMany
    @JoinTable(name = "product_milestone_distributed_artifacts_map", joinColumns = {
            @JoinColumn(name = "product_milestone_id", referencedColumnName = "id") }, inverseJoinColumns = {
            @JoinColumn(name = "artifact_id", referencedColumnName = "id") })
    @ForeignKey(name = "fk_product_milestone_distributed_artifacts_map", inverseName = "fk_distributed_artifacts_product_milestone_map")
    @Index(name = "idx_product_milestone_distributed_artifacts_map", columnNames = { "product_milestone_id",
            "artifact_id" })
    private Set<Artifact> distributedArtifacts;

    @Override
    public Integer getId() {
        return id;
    }

    @Override
    public void setId(Integer id) {
        this.id = id;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    public ProductVersion getProductVersion() {
        return productVersion;
    }

    public void setProductVersion(ProductVersion productVersion) {
        this.productVersion = productVersion;
    }

    /**
     * The scheduled starting date of this milestone
     *
     * @return a Date
     */
    public Date getStartingDate() {
        return startingDate;
    }

    public void setStartingDate(Date startingDate) {
        this.startingDate = startingDate;
    }

    /**
     * The scheduled ending date of this milestone
     *
     * @return a Date
     */
    public Date getPlannedEndDate() {
        return plannedEndDate;
    }

    public void setPlannedEndDate(Date plannedEndDate) {
        this.plannedEndDate = plannedEndDate;
    }

    /**
     * The release (or handoff) date of this milestone
     *
     * @return a Date
     */
    public Date getEndDate() {
        return endDate;
    }

    public void setEndDate(Date endDate) {
        this.endDate = endDate;
    }

    /**
     * URL which can be used to download the product distribution
     *
     * @return The url where this milestone distribution can be downloaded
     */
    public String getDownloadUrl() {
        return downloadUrl;
    }

    public void setDownloadUrl(String downloadUrl) {
        this.downloadUrl = downloadUrl;
    }

    public String getIssueTrackerUrl() {
        return issueTrackerUrl;
    }

    public void setIssueTrackerUrl(String issueTrackerUrl) {
        this.issueTrackerUrl = issueTrackerUrl;
    }

    public Set<BuildRecord> getPerformedBuilds() {
        return performedBuilds;
    }

    public void setPerformedBuilds(Set<BuildRecord> performedBuilds) {
        this.performedBuilds = performedBuilds;
    }

    public Set<Artifact> getDistributedArtifacts() {
        return distributedArtifacts;
    }

    public void setDistributedArtifacts(Set<Artifact> distributedArtifacts) {
        this.distributedArtifacts = distributedArtifacts;
    }

    // NOTE(review): assumes distributedArtifacts has been initialized (e.g.
    // via the Builder); calling this on a bare entity with a null set would
    // NPE — confirm intended usage.
    public boolean addDistributedArtifact(Artifact distributedArtifact) {
        return this.distributedArtifacts.add(distributedArtifact);
    }

    public boolean removeDistributedArtifact(Artifact distributedArtifact) {
        return this.distributedArtifacts.remove(distributedArtifact);
    }

    /**
     * If this milestone was promoted to a release, this field will be set. Will be null if the milestone was not released.
     *
     * @return the product release or null
     */
    public ProductRelease getProductRelease() {
        return productRelease;
    }

    public void setProductRelease(ProductRelease productRelease) {
        this.productRelease = productRelease;
    }

    @Override
    public String toString() {
        return "ProductMilestone [id=" + id + ", version=" + version + "]";
    }

    /** Fluent builder for {@link ProductMilestone}; wires up both sides of the
     * productVersion and productRelease associations in {@link #build()}. */
    public static class Builder {

        private Integer id;

        private String version;

        private ProductVersion productVersion;

        private Date endDate;

        private Date startingDate;

        private Date plannedEndDate;

        private String downloadUrl;

        private String issueTrackerUrl;

        private Set<BuildRecord> performedBuilds = new HashSet<>();

        private Set<Artifact> distributedArtifacts = new HashSet<>();

        private ProductRelease productRelease;

        private Builder() {
        }

        public static Builder newBuilder() {
            return new Builder();
        }

        public ProductMilestone build() {
            ProductMilestone productMilestone = new ProductMilestone();
            productMilestone.setId(id);
            productMilestone.setVersion(version);
            productMilestone.setEndDate(endDate);
            productMilestone.setStartingDate(startingDate);
            productMilestone.setPlannedEndDate(plannedEndDate);
            productMilestone.setDownloadUrl(downloadUrl);
            productMilestone.setIssueTrackerUrl(issueTrackerUrl);

            // Keep both sides of the bidirectional association in sync.
            if (productVersion != null) {
                productVersion.addProductMilestone(productMilestone);
                productMilestone.setProductVersion(productVersion);
            }

            // Guard against a null set handed to performedBuilds(...).
            if (performedBuilds == null) {
                performedBuilds = new HashSet<>();
            }
            productMilestone.setPerformedBuilds(performedBuilds);

            // Guard against a null set handed to distributedArtifacts(...).
            if (distributedArtifacts == null) {
                distributedArtifacts = new HashSet<>();
            }
            productMilestone.setDistributedArtifacts(distributedArtifacts);

            if (productRelease != null) {
                productRelease.setProductMilestone(productMilestone);
                productMilestone.setProductRelease(productRelease);
            }

            return productMilestone;
        }

        public Builder id(Integer id) {
            this.id = id;
            return this;
        }

        public Builder version(String version) {
            this.version = version;
            return this;
        }

        public Builder endDate(Date endDate) {
            this.endDate = endDate;
            return this;
        }

        public Builder startingDate(Date startingDate) {
            this.startingDate = startingDate;
            return this;
        }

        public Builder plannedEndDate(Date plannedEndDate) {
            this.plannedEndDate = plannedEndDate;
            return this;
        }

        public Builder downloadUrl(String downloadUrl) {
            this.downloadUrl = downloadUrl;
            return this;
        }

        public Builder issueTrackerUrl(String issueTrackerUrl) {
            this.issueTrackerUrl = issueTrackerUrl;
            return this;
        }

        public Builder productVersion(ProductVersion productVersion) {
            this.productVersion = productVersion;
            return this;
        }

        public Builder performedBuilds(Set<BuildRecord> performedBuilds) {
            this.performedBuilds = performedBuilds;
            return this;
        }

        public Builder performedBuild(BuildRecord performedBuild) {
            this.performedBuilds.add(performedBuild);
            return this;
        }

        public Builder distributedArtifacts(Set<Artifact> distributedArtifacts) {
            this.distributedArtifacts = distributedArtifacts;
            return this;
        }

        public Builder distributedArtifact(Artifact distributedArtifact) {
            this.distributedArtifacts.add(distributedArtifact);
            return this;
        }

        public Builder productRelease(ProductRelease productRelease) {
            this.productRelease = productRelease;
            return this;
        }

        /**
         * Safe way to try to get the associated product name without worrying about null pointers
         *
         * @return The associated product name, or an empty string
         */
        public String getProductName() {
            if (productVersion != null && productVersion.getProduct() != null) {
                return productVersion.getProduct().getName();
            }
            return "";
        }
    }
}
| |
/*
Copyright 2007-2009 Selenium committers
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.environment.DomainHelper;
import org.openqa.selenium.testing.Ignore;
import org.openqa.selenium.testing.JUnit4TestBase;
import org.openqa.selenium.testing.JavascriptEnabled;
import java.net.URI;
import java.util.Date;
import java.util.Random;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import static org.openqa.selenium.testing.Ignore.Driver.ALL;
import static org.openqa.selenium.testing.Ignore.Driver.ANDROID;
import static org.openqa.selenium.testing.Ignore.Driver.CHROME;
import static org.openqa.selenium.testing.Ignore.Driver.HTMLUNIT;
import static org.openqa.selenium.testing.Ignore.Driver.IE;
import static org.openqa.selenium.testing.Ignore.Driver.IPHONE;
import static org.openqa.selenium.testing.Ignore.Driver.OPERA;
import static org.openqa.selenium.testing.Ignore.Driver.REMOTE;
public class CookieImplementationTest extends JUnit4TestBase {
private DomainHelper domainHelper;
private static final Random random = new Random();
@Before
public void setUp() throws Exception {
    domainHelper = new DomainHelper(appServer);
    // Skip the whole class if we are not on a hostname cookies can be set for.
    assumeTrue(domainHelper.checkIsOnValidHostname());
    // This page is the deepest page we go to in the cookie tests
    // We go to it to ensure that cookies with /common/... paths are deleted
    // Do not write test in this class which use pages other than under /common
    // without ensuring that cookies are deleted on those pages as required
    try {
        driver.get(domainHelper.getUrlForFirstValidHostname("/common/animals"));
    } catch (IllegalArgumentException e) {
        // Ideally we would throw an IgnoredTestError or something here,
        // but our test runner doesn't pay attention to those.
        // Rely on the tests skipping themselves if they need to be on a useful page.
        return;
    }
    // Start every test from a clean cookie jar.
    driver.manage().deleteAllCookies();
    assertNoCookiesArePresent();
}
@JavascriptEnabled
@Test
public void testShouldGetCookieByName() {
    // Plant a cookie via JavaScript, then verify the WebDriver API reads it back.
    String cookieName = generateUniqueKey();
    String expectedValue = "set";
    assertCookieIsNotPresentWithName(cookieName);

    ((JavascriptExecutor) driver).executeScript(
        "document.cookie = arguments[0] + '=' + arguments[1];", cookieName, expectedValue);

    Cookie retrieved = driver.manage().getCookieNamed(cookieName);
    assertEquals(expectedValue, retrieved.getValue());
}
@JavascriptEnabled
@Test
public void testShouldBeAbleToAddCookie() {
    // Round-trip: add a cookie through the driver and confirm its value sticks.
    String name = generateUniqueKey();
    String payload = "foo";
    assertCookieIsNotPresentWithName(name);

    driver.manage().addCookie(new Cookie.Builder(name, payload).build());

    assertCookieHasValue(name, payload);
}
@Test
public void testGetAllCookies() {
String key1 = generateUniqueKey();
String key2 = generateUniqueKey();
assertCookieIsNotPresentWithName(key1);
assertCookieIsNotPresentWithName(key2);
Set<Cookie> cookies = driver.manage().getCookies();
int countBefore = cookies.size();
Cookie one = new Cookie.Builder(key1, "value").build();
Cookie two = new Cookie.Builder(key2, "value").build();
driver.manage().addCookie(one);
driver.manage().addCookie(two);
driver.get(domainHelper.getUrlForFirstValidHostname("simpleTest.html"));
cookies = driver.manage().getCookies();
assertEquals(countBefore + 2, cookies.size());
assertTrue(cookies.contains(one));
assertTrue(cookies.contains(two));
}
@JavascriptEnabled
@Test
public void testDeleteAllCookies() {
((JavascriptExecutor) driver).executeScript("document.cookie = 'foo=set';");
assertSomeCookiesArePresent();
driver.manage().deleteAllCookies();
assertNoCookiesArePresent();
}
@JavascriptEnabled
@Test
public void testDeleteCookieWithName() {
String key1 = generateUniqueKey();
String key2 = generateUniqueKey();
((JavascriptExecutor) driver).executeScript("document.cookie = arguments[0] + '=set';", key1);
((JavascriptExecutor) driver).executeScript("document.cookie = arguments[0] + '=set';", key2);
assertCookieIsPresentWithName(key1);
assertCookieIsPresentWithName(key2);
driver.manage().deleteCookieNamed(key1);
assertCookieIsNotPresentWithName(key1);
assertCookieIsPresentWithName(key2);
}
@Test
public void testShouldNotDeleteCookiesWithASimilarName() {
String cookieOneName = "fish";
Cookie cookie1 = new Cookie.Builder(cookieOneName, "cod").build();
Cookie cookie2 = new Cookie.Builder(cookieOneName + "x", "earth").build();
WebDriver.Options options = driver.manage();
assertCookieIsNotPresentWithName(cookie1.getName());
options.addCookie(cookie1);
options.addCookie(cookie2);
assertCookieIsPresentWithName(cookie1.getName());
options.deleteCookieNamed(cookieOneName);
Set<Cookie> cookies = options.getCookies();
assertFalse(cookies.toString(), cookies.contains(cookie1));
assertTrue(cookies.toString(), cookies.contains(cookie2));
}
@Ignore(OPERA)
@Test
public void testAddCookiesWithDifferentPathsThatAreRelatedToOurs() {
driver.get(domainHelper.getUrlForFirstValidHostname("/common/animals"));
Cookie cookie1 = new Cookie.Builder("fish", "cod").path("/common/animals").build();
Cookie cookie2 = new Cookie.Builder("planet", "earth").path("/common/").build();
WebDriver.Options options = driver.manage();
options.addCookie(cookie1);
options.addCookie(cookie2);
driver.get(domainHelper.getUrlForFirstValidHostname("/common/animals"));
assertCookieIsPresentWithName(cookie1.getName());
assertCookieIsPresentWithName(cookie2.getName());
driver.get(domainHelper.getUrlForFirstValidHostname("/common/simplePage.html"));
assertCookieIsNotPresentWithName(cookie1.getName());
}
@Ignore({CHROME, OPERA})
@Test
public void testCannotGetCookiesWithPathDifferingOnlyInCase() {
String cookieName = "fish";
Cookie cookie = new Cookie.Builder(cookieName, "cod").path("/Common/animals").build();
driver.manage().addCookie(cookie);
driver.get(domainHelper.getUrlForFirstValidHostname("animals"));
assertNull(driver.manage().getCookieNamed(cookieName));
}
@Test
public void testShouldNotGetCookieOnDifferentDomain() {
assumeTrue(domainHelper.checkHasValidAlternateHostname());
String cookieName = "fish";
driver.manage().addCookie(new Cookie.Builder(cookieName, "cod").build());
assertCookieIsPresentWithName(cookieName);
driver.get(domainHelper.getUrlForSecondValidHostname("simpleTest.html"));
assertCookieIsNotPresentWithName(cookieName);
}
@Ignore(value = {ANDROID, CHROME, HTMLUNIT, IE, IPHONE, OPERA},
reason = "Untested browsers.")
@Test
public void testShouldBeAbleToAddToADomainWhichIsRelatedToTheCurrentDomain() {
String cookieName = "name";
assertCookieIsNotPresentWithName(cookieName);
String shorter = domainHelper.getHostName().replaceFirst(".*?\\.", ".");
Cookie cookie = new Cookie.Builder(cookieName, "value").domain(shorter).build();
driver.manage().addCookie(cookie);
assertCookieIsPresentWithName(cookieName);
}
@Ignore(value = {ALL})
@Test
public void testsShouldNotGetCookiesRelatedToCurrentDomainWithoutLeadingPeriod() {
String cookieName = "name";
assertCookieIsNotPresentWithName(cookieName);
String shorter = domainHelper.getHostName().replaceFirst(".*?\\.", "");
Cookie cookie = new Cookie.Builder(cookieName, "value").domain(shorter).build();
driver.manage().addCookie(cookie);
assertCookieIsNotPresentWithName(cookieName);
}
@Ignore({REMOTE, IE})
@Test
public void testShouldBeAbleToIncludeLeadingPeriodInDomainName() throws Exception {
String cookieName = "name";
assertCookieIsNotPresentWithName(cookieName);
String shorter = domainHelper.getHostName().replaceFirst(".*?\\.", ".");
Cookie cookie = new Cookie.Builder("name", "value").domain(shorter).build();
driver.manage().addCookie(cookie);
assertCookieIsPresentWithName(cookieName);
}
@Ignore(IE)
@Test
public void testShouldBeAbleToSetDomainToTheCurrentDomain() throws Exception {
URI url = new URI(driver.getCurrentUrl());
String host = url.getHost() + ":" + url.getPort();
Cookie cookie = new Cookie.Builder("fish", "cod").domain(host).build();
driver.manage().addCookie(cookie);
driver.get(domainHelper.getUrlForFirstValidHostname("javascriptPage.html"));
Set<Cookie> cookies = driver.manage().getCookies();
assertTrue(cookies.contains(cookie));
}
@Test
public void testShouldWalkThePathToDeleteACookie() {
Cookie cookie1 = new Cookie.Builder("fish", "cod").build();
driver.manage().addCookie(cookie1);
driver.get(domainHelper.getUrlForFirstValidHostname("child/childPage.html"));
Cookie cookie2 = new Cookie("rodent", "hamster", "/common/child");
driver.manage().addCookie(cookie2);
driver.get(domainHelper.getUrlForFirstValidHostname("child/grandchild/grandchildPage.html"));
Cookie cookie3 = new Cookie("dog", "dalmation", "/common/child/grandchild/");
driver.manage().addCookie(cookie3);
driver.get(domainHelper.getUrlForFirstValidHostname("child/grandchild/grandchildPage.html"));
driver.manage().deleteCookieNamed("rodent");
assertNull(driver.manage().getCookies().toString(), driver.manage().getCookieNamed("rodent"));
Set<Cookie> cookies = driver.manage().getCookies();
assertEquals(2, cookies.size());
assertTrue(cookies.contains(cookie1));
assertTrue(cookies.contains(cookie3));
driver.manage().deleteAllCookies();
driver.get(domainHelper.getUrlForFirstValidHostname("child/grandchild/grandchildPage.html"));
assertNoCookiesArePresent();
}
@Ignore(IE)
@Test
public void testShouldIgnoreThePortNumberOfTheHostWhenSettingTheCookie() throws Exception {
URI uri = new URI(driver.getCurrentUrl());
String host = String.format("%s:%d", uri.getHost(), uri.getPort());
String cookieName = "name";
assertCookieIsNotPresentWithName(cookieName);
Cookie cookie = new Cookie.Builder(cookieName, "value").domain(host).build();
driver.manage().addCookie(cookie);
assertCookieIsPresentWithName(cookieName);
}
@Ignore(OPERA)
@Test
public void testCookieEqualityAfterSetAndGet() {
driver.get(domainHelper.getUrlForFirstValidHostname("animals"));
driver.manage().deleteAllCookies();
Cookie addedCookie =
new Cookie.Builder("fish", "cod")
.path("/common/animals")
.expiresOn(someTimeInTheFuture())
.build();
driver.manage().addCookie(addedCookie);
Set<Cookie> cookies = driver.manage().getCookies();
Cookie retrievedCookie = null;
for (Cookie temp : cookies) {
if (addedCookie.equals(temp)) {
retrievedCookie = temp;
break;
}
}
assertNotNull("Cookie was null", retrievedCookie);
// Cookie.equals only compares name, domain and path
assertEquals(addedCookie, retrievedCookie);
}
@Ignore(value = {ANDROID, IE, OPERA}, reason =
"Selenium, which use JavaScript to retrieve cookies, cannot return expiry info; " +
"Other suppressed browsers have not been tested.")
@Test
public void testRetainsCookieExpiry() {
Cookie addedCookie =
new Cookie.Builder("fish", "cod")
.path("/common/animals")
.expiresOn(someTimeInTheFuture())
.build();
driver.manage().addCookie(addedCookie);
Cookie retrieved = driver.manage().getCookieNamed("fish");
assertNotNull(retrieved);
assertEquals(addedCookie.getExpiry(), retrieved.getExpiry());
}
@Ignore(ANDROID)
@Test
public void testSettingACookieThatExpiredInThePast() {
long expires = System.currentTimeMillis() - 1000;
Cookie cookie = new Cookie.Builder("expired", "yes").expiresOn(new Date(expires)).build();
driver.manage().addCookie(cookie);
cookie = driver.manage().getCookieNamed("fish");
assertNull(
"Cookie expired before it was set, so nothing should be returned: " + cookie, cookie);
}
@Test
public void testCanSetCookieWithoutOptionalFieldsSet() {
String key = generateUniqueKey();
String value = "foo";
Cookie cookie = new Cookie(key, value);
assertCookieIsNotPresentWithName(key);
driver.manage().addCookie(cookie);
assertCookieHasValue(key, value);
}
@Test
public void testDeleteNotExistedCookie() {
String key = generateUniqueKey();
assertCookieIsNotPresentWithName(key);
driver.manage().deleteCookieNamed(key);
}
private String generateUniqueKey() {
return String.format("key_%d", random.nextInt());
}
private void assertNoCookiesArePresent() {
Set<Cookie> cookies = driver.manage().getCookies();
assertTrue("Cookies were not empty, present: " + cookies,
cookies.isEmpty());
String documentCookie = getDocumentCookieOrNull();
if (documentCookie != null) {
assertEquals("Cookies were not empty", "", documentCookie);
}
}
private void assertSomeCookiesArePresent() {
assertFalse("Cookies were empty",
driver.manage().getCookies().isEmpty());
String documentCookie = getDocumentCookieOrNull();
if (documentCookie != null) {
assertNotSame("Cookies were empty", "", documentCookie);
}
}
private void assertCookieIsNotPresentWithName(final String key) {
assertNull("Cookie was present with name " + key, driver.manage().getCookieNamed(key));
String documentCookie = getDocumentCookieOrNull();
if (documentCookie != null) {
assertThat("Cookie was present with name " + key,
documentCookie,
not(containsString(key + "=")));
}
}
private void assertCookieIsPresentWithName(final String key) {
assertNotNull("Cookie was not present with name " + key, driver.manage().getCookieNamed(key));
String documentCookie = getDocumentCookieOrNull();
if (documentCookie != null) {
assertThat("Cookie was not present with name " + key + ", got: " + documentCookie,
documentCookie,
containsString(key + "="));
}
}
private void assertCookieHasValue(final String key, final String value) {
assertEquals("Cookie had wrong value",
value,
driver.manage().getCookieNamed(key).getValue());
String documentCookie = getDocumentCookieOrNull();
if (documentCookie != null) {
assertThat("Cookie was present with name " + key,
documentCookie,
containsString(key + "=" + value));
}
}
private String getDocumentCookieOrNull() {
if (!(driver instanceof JavascriptExecutor)) {
return null;
}
try {
return (String) ((JavascriptExecutor) driver).executeScript("return document.cookie");
} catch (UnsupportedOperationException e) {
return null;
}
}
private Date someTimeInTheFuture() {
return new Date(System.currentTimeMillis() + 100000);
}
}
| |
package com.github.norwae.whatiread.data;
import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
public class BookInfo implements Serializable {

	private static final long serialVersionUID = 2317828706327659235L;

	// Volume metadata, mirroring the "volumeInfo" object of the JSON payload.
	private String title;
	private String subtitle;
	private String series;
	private String description;
	private String publisher;
	private URI thumbnail;
	private URI thumbnailSmall;
	private int pageCount;
	// True when the backend did not flag this book as already known.
	private boolean addition;
	private List<String> authors;
	private String comment;
	private String isbn;

	/**
	 * Parses a search-result document of the form
	 * {@code {"totalItems": n, "items": [...]}} into a list of books.
	 *
	 * @param jsonString the raw JSON document, must not be null
	 * @return one BookInfo per item; empty when there are no results
	 * @throws JSONException if the document is malformed or an item lacks "isbn"
	 * @throws URISyntaxException if a thumbnail URL is not a valid URI
	 */
	public static List<BookInfo> parseJSONMulti(String jsonString)
			throws JSONException, URISyntaxException {
		List<BookInfo> infos = new ArrayList<BookInfo>();
		JSONObject json = new JSONObject(jsonString);
		int nr = json.getInt("totalItems");
		if (nr > 0) {
			// optJSONArray returns null when absent; getJSONArray would throw
			// instead of returning null, which made the old null check dead code.
			JSONArray items = json.optJSONArray("items");
			if (items != null) {
				for (int i = 0; i < items.length(); i++) {
					BookInfo info = new BookInfo();
					info.parseJSON(items.getJSONObject(i));
					infos.add(info);
				}
			}
		}
		return infos;
	}

	/**
	 * Populates this instance from a single JSON item string.
	 *
	 * @param jsonString one item object, must not be null
	 * @throws JSONException if the document is malformed or lacks "isbn"
	 * @throws URISyntaxException if a thumbnail URL is not a valid URI
	 */
	public void parseJSON(String jsonString) throws JSONException,
			URISyntaxException {
		JSONObject json = new JSONObject(jsonString);
		parseJSON(json);
	}

	/**
	 * Serializes this book back into the item JSON shape: "comment" and "isbn"
	 * at top level, the remaining fields under "volumeInfo". Thumbnail URIs and
	 * the "known" flag are not written back.
	 *
	 * @return the JSON string
	 * @throws JSONException if a value cannot be stored
	 */
	public String quoteJSON() throws JSONException {
		JSONObject json = new JSONObject();
		json.putOpt("comment", comment);
		json.putOpt("isbn", isbn);
		JSONObject volume = new JSONObject();
		json.put("volumeInfo", volume);
		volume.putOpt("title", title);
		volume.putOpt("subtitle", subtitle);
		volume.putOpt("series", series);
		volume.putOpt("description", description);
		volume.putOpt("publisher", publisher);
		volume.putOpt("pageCount", pageCount);
		if (authors != null && !authors.isEmpty()) {
			JSONArray array = new JSONArray();
			volume.put("authors", array);
			for (String author : authors) {
				array.put(author);
			}
		}
		return json.toString();
	}

	/**
	 * Populates this instance from a parsed item object. "isbn" is mandatory;
	 * everything else is optional (optString yields "" for missing keys).
	 */
	private void parseJSON(JSONObject obj) throws JSONException,
			URISyntaxException {
		comment = obj.optString("comment");
		isbn = obj.getString("isbn"); // mandatory: throws when absent
		// optBoolean returns false for a missing flag, so an unflagged book is
		// treated as a new addition. (Was !Boolean.TRUE.equals(...) — a needless
		// autoboxing detour around a primitive boolean.)
		addition = !obj.optBoolean("known");
		obj = obj.getJSONObject("volumeInfo");
		title = obj.optString("title");
		subtitle = obj.optString("subtitle");
		series = obj.optString("series");
		description = obj.optString("description");
		publisher = obj.optString("publisher");
		pageCount = obj.optInt("pageCount");
		JSONArray array = obj.optJSONArray("authors");
		if (array != null) {
			authors = new ArrayList<String>(array.length());
			for (int i = 0; i < array.length(); i++) {
				authors.add(array.getString(i));
			}
		}
		obj = obj.optJSONObject("imageLinks");
		if (obj != null) {
			String url = obj.optString("thumbnail");
			if (url != null && !url.isEmpty()) {
				thumbnail = new URI(url);
			}
			url = obj.optString("smallThumbnail");
			if (url != null && !url.isEmpty()) {
				thumbnailSmall = new URI(url);
			}
		}
	}

	public String getTitle() {
		return title;
	}

	public void setTitle(String title) {
		this.title = title;
	}

	public String getSubtitle() {
		return subtitle;
	}

	public void setSubtitle(String subtitle) {
		this.subtitle = subtitle;
	}

	public String getSeries() {
		return series;
	}

	public void setSeries(String series) {
		this.series = series;
	}

	public String getDescription() {
		return description;
	}

	public void setDescription(String description) {
		this.description = description;
	}

	public String getPublisher() {
		return publisher;
	}

	public void setPublisher(String publisher) {
		this.publisher = publisher;
	}

	public List<String> getAuthors() {
		return authors;
	}

	public void setAuthors(List<String> authors) {
		this.authors = authors;
	}

	public String getComment() {
		return comment;
	}

	public void setComment(String comment) {
		this.comment = comment;
	}

	// isbn, thumbnails, pageCount and addition are read-only: they are only
	// ever established by parseJSON.
	public String getIsbn() {
		return isbn;
	}

	public URI getThumbnail() {
		return thumbnail;
	}

	public URI getThumbnailSmall() {
		return thumbnailSmall;
	}

	public int getPageCount() {
		return pageCount;
	}

	public boolean isAddition() {
		return addition;
	}
}
| |
package at.arz.latte.rodeo.workspace.restapi;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.inject.Inject;
import javax.servlet.ServletContext;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import at.arz.latte.rodeo.infrastructure.RodeoSecurity;
import at.arz.latte.rodeo.rest.XSLTSheet;
import at.arz.latte.rodeo.workspace.Workspace;
@Path("/workspaces")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public class WorkspaceResource {

	@Inject
	private Workspace workspace;

	@Inject
	RodeoSecurity security;

	@Context
	private ServletContext context;

	@Context
	private UriInfo uriInfo;

	/** Directory listing for the workspace root. */
	@Path("/")
	@GET
	@XSLTSheet("workspaces")
	public Response getRootIndex() {
		return getIndex("/");
	}

	/**
	 * Serves the given workspace-relative path: a listing for directories, the
	 * file content (with a best-effort MIME type) for files, 404 otherwise.
	 *
	 * NOTE(review): {@code path} comes from the URL and is joined onto the
	 * workspace dir without normalization — ".." segments could escape the
	 * workspace if the container does not normalize them. Verify and consider
	 * rejecting paths that resolve outside the workspace.
	 */
	@Path("{path : .+}")
	@GET
	@XSLTSheet("workspaces")
	public Response getIndex(@PathParam("path") String path) {
		File workspaceDir = workspace.getWorkspaceDir();
		File subdir = new File(workspaceDir, path);
		if (!subdir.exists()) {
			return Response.status(Status.NOT_FOUND).type(MediaType.TEXT_PLAIN).entity(path + " not found").build();
		}
		if (subdir.isDirectory()) {
			return Response.status(Status.OK).entity(buildDirList(subdir)).build();
		}
		String mimeType = evaluateMimeType(subdir);
		if (MediaType.APPLICATION_OCTET_STREAM.equals(mimeType)) {
			// Unknown content types are offered as a download instead of inline.
			return Response.ok(subdir, mimeType)
					.header("Content-Disposition", "attachment; filename=\"" + subdir.getName() + "\"")
					.build();
		}
		return Response.ok(subdir, mimeType).build();
	}

	/** Resolves a MIME type via the servlet context, defaulting to octet-stream. */
	private String evaluateMimeType(File subdir) {
		String mimeType = context.getMimeType(subdir.getName());
		if (mimeType == null) {
			return MediaType.APPLICATION_OCTET_STREAM;
		}
		return mimeType;
	}

	/**
	 * Creates a file (or, for an empty body, a directory) at the given path.
	 * 409 when the target already exists, 406 on I/O failure. Admin only.
	 */
	@Path("{path : .+}")
	@PUT
	public Response uploadFile(@PathParam("path") String path, InputStream inputStream) {
		security.assertUserIsAdmin();
		File workspaceDir = workspace.getWorkspaceDir();
		File destinationFile = new File(workspaceDir, path);
		if (destinationFile.exists()) {
			return Response.status(Status.CONFLICT).build();
		}
		try {
			// An empty request body means "create a directory" by convention.
			if (inputStream.available() == 0) {
				createDirectory(destinationFile);
				return Response.ok().build();
			}
			uploadFile(inputStream, destinationFile);
			return Response.ok().build();
		} catch (IOException e) {
			return Response.status(Status.NOT_ACCEPTABLE).build();
		}
	}

	/** Creates the file (and any missing parent dirs) and fills it from the stream. */
	private void uploadFile(InputStream inputStream, File destinationFile) throws IOException {
		destinationFile.getParentFile().mkdirs();
		destinationFile.createNewFile();
		writeStreamToFile(inputStream, destinationFile);
	}

	private void createDirectory(File destinationFile) {
		destinationFile.mkdirs();
	}

	/**
	 * Overwrites an existing file with the request body. 404 when the target
	 * does not exist, 406 on I/O failure. Admin only.
	 */
	@Path("{path : .+}")
	@POST
	public Response updateFile(@PathParam("path") String path, InputStream inputStream) {
		security.assertUserIsAdmin();
		File workspaceDir = workspace.getWorkspaceDir();
		File destinationFile = new File(workspaceDir, path);
		if (!destinationFile.exists()) {
			return Response.status(Status.NOT_FOUND).type(MediaType.TEXT_PLAIN).entity(path + " not found").build();
		}
		try {
			writeStreamToFile(inputStream, destinationFile);
		} catch (IOException e) {
			return Response.status(Status.NOT_ACCEPTABLE).build();
		}
		return Response.ok().build();
	}

	/**
	 * Deletes a file or directory (recursively). 409 when the target does not
	 * exist. Admin only.
	 *
	 * Method name keeps its historical typo ("Directoy") so existing
	 * references to this resource method remain valid.
	 */
	@Path("{path : .+}")
	@DELETE
	public Response deleteFileOrDirectoy(@PathParam("path") String path) {
		security.assertUserIsAdmin();
		File workspaceDir = workspace.getWorkspaceDir();
		File destinationfile = new File(workspaceDir, path);
		if (!destinationfile.exists()) {
			return Response.status(Status.CONFLICT).build();
		}
		if (destinationfile.isFile()) {
			if (destinationfile.delete()) {
				return Response.ok().build();
			}
			return Response.notModified().build();
		}
		if (destinationfile.isDirectory()) {
			deleteDirectory(destinationfile);
		}
		return Response.ok().build();
	}

	/** Recursively deletes a directory tree, including the directory itself. */
	private void deleteDirectory(File directory) {
		if (!directory.exists()) {
			return;
		}
		File[] files = directory.listFiles();
		if (files != null) { // listFiles() returns null on I/O error
			for (File file : files) {
				if (file.isDirectory()) {
					deleteDirectory(file);
				} else {
					file.delete();
				}
			}
		}
		// BUG FIX: remove the now-empty directory itself; previously only the
		// contents were deleted and the empty directory was left behind.
		directory.delete();
	}

	/** Copies the stream into the file, always closing the output stream. */
	private void writeStreamToFile(InputStream in, File resource) throws IOException {
		FileOutputStream fout = new FileOutputStream(resource);
		try {
			final byte[] buffer = new byte[1024];
			int rc;
			while ((rc = in.read(buffer, 0, buffer.length)) != -1) {
				fout.write(buffer, 0, rc);
			}
		} finally {
			fout.close();
		}
	}

	/** Builds the listing DTO for a directory, including a parent link. */
	private DirListResult buildDirList(File sourceDirectory) {
		String parentPath = buildParentPath(workspace.getWorkspaceDir(), sourceDirectory);
		String baseUri = uriInfo.getBaseUri().toString();
		String link = uriInfo.getRequestUri().toString();
		List<DirItem> list = buildDirItems(sourceDirectory, link);
		String path = buildPath(workspace.getWorkspaceDir(), sourceDirectory);
		return new DirListResult(baseUri + "workspaces/" + parentPath, path, list);
	}

	/** One DTO per child; directories report size 0. */
	private List<DirItem> buildDirItems(File rootPath, String link) {
		File[] files = rootPath.listFiles();
		if (files == null) { // I/O error or not a directory: empty listing, not an NPE
			return new ArrayList<DirItem>();
		}
		ArrayList<DirItem> list = new ArrayList<DirItem>(files.length);
		for (File file : files) {
			DirItem item = new DirItem();
			item.setDirectory(file.isDirectory());
			item.setLastModified(new Date(file.lastModified()));
			item.setSize(file.isDirectory() ? 0 : file.length());
			item.setName(buildPath(rootPath, file));
			item.setLink(link + "/" + file.getName());
			list.add(item);
		}
		return list;
	}

	/** Path of {@code file} relative to {@code directory}, URI-style. */
	private String buildPath(File directory, File file) {
		return file.toURI().getPath().substring(directory.toURI().getPath().length());
	}

	/** Path of {@code file}'s parent relative to {@code directory}, URI-style. */
	private String buildParentPath(File directory, File file) {
		return file.getParentFile().toURI().getPath().substring(directory.toURI().getPath().length());
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.more.builder;
import org.more.util.ArrayUtils;
import org.more.util.ClassUtils;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
/**
* <p>
* Assists in implementing {@link Object#toString()} methods using reflection.
* </p>
*
* <p>
* This class uses reflection to determine the fields to append. Because these fields are usually private, the class
* uses {@link java.lang.reflect.AccessibleObject#setAccessible(java.lang.reflect.AccessibleObject[], boolean)} to
* change the visibility of the fields. This will fail under a security manager, unless the appropriate permissions are
* set up correctly.
* </p>
*
* <p>
* A typical invocation for this method would look like:
* </p>
*
* <pre>
* public String toString() {
* return ReflectionToStringBuilder.toString(this);
* }</pre>
*
*
*
* <p>
* You can also use the builder to debug 3rd party objects:
* </p>
*
* <pre>
* System.out.println("An object: " + ReflectionToStringBuilder.toString(anObject));</pre>
*
*
*
* <p>
* A subclass can control field output by overriding the methods:
* <ul>
* <li>{@link #accept(java.lang.reflect.Field)}</li>
* <li>{@link #getValue(java.lang.reflect.Field)}</li>
* </ul>
* </p>
* <p>
* For example, this method does <i>not</i> include the <code>password</code> field in the returned
* <code>String</code>:
* </p>
*
* <pre>
* public String toString() {
* return (new ReflectionToStringBuilder(this) {
* protected boolean accept(Field f) {
* return super.accept(f) && !f.getName().equals("password");
* }
* }).toString();
* }</pre>
*
*
*
* <p>
* The exact format of the <code>toString</code> is determined by the {@link ToStringStyle} passed into the
* constructor.
* </p>
*
* @author Apache Software Foundation
* @author Gary Gregory
* @author Pete Gieser
* @since 2.0
* @version $Id: ReflectionToStringBuilder.java 905636 2010-02-02 14:03:32Z niallp $
*/
public class ReflectionToStringBuilder extends ToStringBuilder {
/**
 * Produces a {@code toString} for the given object via reflection, using the
 * default {@link ToStringStyle}.
 *
 * <p>Private fields are read through {@code AccessibleObject.setAccessible},
 * which can raise a security exception under a security manager with
 * restrictive permissions, and is slower than explicit appends.</p>
 *
 * <p>Transient and static fields are skipped; superclass fields are included.</p>
 *
 * @param object the object to describe, must not be {@code null}
 * @return the generated string
 * @throws IllegalArgumentException if {@code object} is {@code null}
 */
public static String toString(Object object) {
    return toString(object, null, false, false, null);
}
/**
 * Produces a {@code toString} for the given object via reflection, using the
 * supplied style ({@code null} means the default {@link ToStringStyle}).
 *
 * <p>Private fields are read through {@code AccessibleObject.setAccessible},
 * which can raise a security exception under a security manager with
 * restrictive permissions, and is slower than explicit appends.</p>
 *
 * <p>Transient and static fields are skipped; superclass fields are included.</p>
 *
 * @param object the object to describe, must not be {@code null}
 * @param style  the output style, may be {@code null}
 * @return the generated string
 * @throws IllegalArgumentException if {@code object} is {@code null}
 */
public static String toString(Object object, ToStringStyle style) {
    return toString(object, style, false, false, null);
}
/**
 * Produces a {@code toString} for the given object via reflection, with
 * optional inclusion of transient fields.
 *
 * <p>Private fields are read through {@code AccessibleObject.setAccessible},
 * which can raise a security exception under a security manager with
 * restrictive permissions, and is slower than explicit appends.</p>
 *
 * <p>Static fields are skipped; superclass fields are included. A {@code null}
 * style selects the default {@link ToStringStyle}.</p>
 *
 * @param object           the object to describe, must not be {@code null}
 * @param style            the output style, may be {@code null}
 * @param outputTransients whether transient fields are emitted (they are
 *                         usually derived values, hence off by default)
 * @return the generated string
 * @throws IllegalArgumentException if {@code object} is {@code null}
 */
public static String toString(Object object, ToStringStyle style, boolean outputTransients) {
    return toString(object, style, outputTransients, false, null);
}
/**
 * <p>
 * Builds a <code>toString</code> value through reflection.
 * </p>
 *
 * <p>
 * It uses <code>AccessibleObject.setAccessible</code> to gain access to private fields. This means that it will
 * throw a security exception if run under a security manager, if the permissions are not set up correctly. It is
 * also not as efficient as testing explicitly.
 * </p>
 *
 * <p>
 * If the <code>outputTransients</code> is <code>true</code>, transient fields will be output, otherwise they
 * are ignored, as they are likely derived fields, and not part of the value of the Object.
 * </p>
 *
 * <p>
 * If the <code>outputStatics</code> is <code>true</code>, static fields will be output, otherwise they are
 * ignored.
 * </p>
 *
 * <p>
 * Superclass fields will be appended.
 * </p>
 *
 * <p>
 * If the style is <code>null</code>, the default <code>ToStringStyle</code> is used.
 * </p>
 *
 * @param object
 *            the Object to be output
 * @param style
 *            the style of the <code>toString</code> to create, may be <code>null</code>
 * @param outputTransients
 *            whether to include transient fields
 * @param outputStatics
 *            whether to include static fields
 * @return the String result
 * @throws IllegalArgumentException
 *             if the Object is <code>null</code>
 * @since 2.1
 */
public static String toString(Object object, ToStringStyle style, boolean outputTransients, boolean outputStatics) {
    return toString(object, style, outputTransients, outputStatics, null);
}
/**
 * Produces a {@code toString} for the given object via reflection, with full
 * control over transients, statics and how far up the class hierarchy to go.
 *
 * <p>Private fields are read through {@code AccessibleObject.setAccessible},
 * which can raise a security exception under a security manager with
 * restrictive permissions, and is slower than explicit appends.</p>
 *
 * <p>Superclass fields are appended up to and including
 * {@code reflectUpToClass}; {@code null} is treated as
 * {@code java.lang.Object}. A {@code null} style selects the default
 * {@link ToStringStyle}.</p>
 *
 * @param object           the object to describe, must not be {@code null}
 * @param style            the output style, may be {@code null}
 * @param outputTransients whether transient fields are emitted
 * @param outputStatics    whether static fields are emitted
 * @param reflectUpToClass the superclass to reflect up to (inclusive), may be {@code null}
 * @return the generated string
 * @throws IllegalArgumentException if {@code object} is {@code null}
 * @since 2.1
 */
public static String toString(Object object, ToStringStyle style, boolean outputTransients, boolean outputStatics, Class<?> reflectUpToClass) {
    ReflectionToStringBuilder builder =
            new ReflectionToStringBuilder(object, style, null, reflectUpToClass, outputTransients, outputStatics);
    return builder.toString();
}
/**
 * Builds a {@code toString} for the object, omitting one named field.
 *
 * @param object           the object to describe
 * @param excludeFieldName the single field name to leave out
 * @return the generated string
 */
public static String toStringExclude(Object object, final String excludeFieldName) {
    String[] singleExclusion = { excludeFieldName };
    return toStringExclude(object, singleExclusion);
}
/**
 * Builds a {@code toString} for the object, omitting the named fields.
 *
 * @param object            the object to describe
 * @param excludeFieldNames field names (as Strings) to leave out; {@code null}
 *                          excludes nothing
 * @return the generated string
 */
public static String toStringExclude(Object object, Collection<?> /*String*/ excludeFieldNames) {
    String[] names = toNoNullStringArray(excludeFieldNames);
    return toStringExclude(object, names);
}
/**
 * Converts the given collection into a String array with all {@code null}
 * entries dropped (a {@code null} element would otherwise make
 * {@link Arrays#sort(Object[])} throw a {@link NullPointerException}).
 *
 * @param collection the collection to convert, may be {@code null}
 * @return a new array of Strings; empty for a {@code null} collection
 */
static String[] toNoNullStringArray(Collection<?> collection) {
    return collection == null
            ? ArrayUtils.EMPTY_STRING_ARRAY
            : toNoNullStringArray(collection.toArray());
}
/**
 * Returns a new String array containing {@code toString()} of every non-null
 * element of the input. Internal helper used to normalize exclude lists
 * (arrays and collections); {@code null} entries are dropped because
 * {@link Arrays#sort(Object[])} would throw a {@link NullPointerException}
 * on them.
 *
 * @param array the array to filter, must not be {@code null}
 * @return a new array of Strings without nulls (possibly empty)
 */
static String[] toNoNullStringArray(Object[] array) {
    ArrayList<String> list = new ArrayList<String>(array.length);
    // Enhanced for-loop instead of an index loop; behavior is identical.
    for (Object e : array) {
        if (e != null) {
            list.add(e.toString());
        }
    }
    // toArray(T[]) on an ArrayList<String> already returns String[]; the old
    // explicit (String[]) cast was redundant and has been removed.
    return list.toArray(ArrayUtils.EMPTY_STRING_ARRAY);
}
/**
 * Builds a {@code toString} for the object, omitting the named fields.
 *
 * @param object            the object to describe
 * @param excludeFieldNames the field names to leave out
 * @return the generated string
 */
public static String toStringExclude(Object object, String[] excludeFieldNames) {
    ReflectionToStringBuilder builder = new ReflectionToStringBuilder(object);
    builder.setExcludeFieldNames(excludeFieldNames);
    return builder.toString();
}
/**
 * Whether or not to append static fields.
 */
private boolean appendStatics = false;
/**
 * Whether or not to append transient fields.
 */
private boolean appendTransients = false;
/**
 * Which field names to exclude from output. Intended for fields like <code>"password"</code>.
 * Kept sorted by {@link #setExcludeFieldNames(String[])} so it can be binary-searched.
 */
private String[] excludeFieldNames;
/**
 * The last super class to stop appending fields for; <code>null</code> means walk up to Object.
 */
private Class<?> upToClass = null;
/**
 * Constructor.
 *
 * <p>This constructor outputs using the default style set with <code>setDefaultStyle</code>.</p>
 *
 * @param object
 *            the Object to build a <code>toString</code> for, must not be <code>null</code>
 * @throws IllegalArgumentException
 *             if the Object passed in is <code>null</code>
 */
public ReflectionToStringBuilder(Object object) {
    super(object);
}
/**
 * Constructor.
 *
 * <p>If the style is <code>null</code>, the default style is used.</p>
 *
 * @param object
 *            the Object to build a <code>toString</code> for, must not be <code>null</code>
 * @param style
 *            the style of the <code>toString</code> to create, may be <code>null</code>
 * @throws IllegalArgumentException
 *             if the Object passed in is <code>null</code>
 */
public ReflectionToStringBuilder(Object object, ToStringStyle style) {
    super(object, style);
}
/**
 * Constructor.
 *
 * <p>If the style is <code>null</code>, the default style is used.</p>
 *
 * <p>If the buffer is <code>null</code>, a new one is created.</p>
 *
 * @param object
 *            the Object to build a <code>toString</code> for
 * @param style
 *            the style of the <code>toString</code> to create, may be <code>null</code>
 * @param buffer
 *            the <code>StringBuffer</code> to populate, may be <code>null</code>
 * @throws IllegalArgumentException
 *             if the Object passed in is <code>null</code>
 */
public ReflectionToStringBuilder(Object object, ToStringStyle style, StringBuffer buffer) {
    super(object, style, buffer);
}
/**
 * Constructor.
 *
 * @param object
 *            the Object to build a <code>toString</code> for
 * @param style
 *            the style of the <code>toString</code> to create, may be <code>null</code>
 * @param buffer
 *            the <code>StringBuffer</code> to populate, may be <code>null</code>
 * @param reflectUpToClass
 *            the superclass to reflect up to (inclusive), may be <code>null</code>
 * @param outputTransients
 *            whether to include transient fields
 * @param outputStatics
 *            whether to include static fields
 * @since 2.1
 */
public ReflectionToStringBuilder(Object object, ToStringStyle style, StringBuffer buffer, Class<?> reflectUpToClass, boolean outputTransients, boolean outputStatics) {
    super(object, style, buffer);
    // Setters are used (not direct field writes) so setUpToClass's validation applies.
    this.setUpToClass(reflectUpToClass);
    this.setAppendTransients(outputTransients);
    this.setAppendStatics(outputStatics);
}
/**
 * Returns whether or not to append the given <code>Field</code>.
 * <ul>
 * <li>Inner class fields are never appended.</li>
 * <li>Transient fields are appended only if {@link #isAppendTransients()} returns <code>true</code>.</li>
 * <li>Static fields are appended only if {@link #isAppendStatics()} returns <code>true</code>.</li>
 * <li>Fields named in the exclude list are never appended.</li>
 * </ul>
 *
 * @param field
 *            The Field to test.
 * @return Whether or not to append the given <code>Field</code>.
 */
protected boolean accept(Field field) {
    if (field.getName().indexOf(ClassUtils.INNER_CLASS_SEPARATOR_CHAR) != -1) {
        // Field belongs to an inner class; reject it.
        return false;
    }
    final int modifiers = field.getModifiers();
    if (Modifier.isTransient(modifiers) && !this.isAppendTransients()) {
        return false;
    }
    if (Modifier.isStatic(modifiers) && !this.isAppendStatics()) {
        return false;
    }
    // The exclude list is kept sorted by setExcludeFieldNames, so binary search is valid here.
    final String[] excluded = this.getExcludeFieldNames();
    return excluded == null || Arrays.binarySearch(excluded, field.getName()) < 0;
}
/**
 * Appends the fields and values defined by the given object of the given Class.
 *
 * <p>If a cycle is detected as an object is "toString()'ed", such an object is rendered as if
 * <code>Object.toString()</code> had been called and not implemented by the object.</p>
 *
 * @param clazz
 *            The class of object parameter
 * @throws InternalError
 *             if a field unexpectedly cannot be read despite setAccessible (the original
 *             IllegalAccessException is attached as the cause).
 */
protected void appendFieldsIn(Class<?> clazz) {
    if (clazz.isArray()) {
        this.reflectionAppendArray(this.getObject());
        return;
    }
    Field[] fields = clazz.getDeclaredFields();
    AccessibleObject.setAccessible(fields, true);
    for (int i = 0; i < fields.length; i++) {
        Field field = fields[i];
        String fieldName = field.getName();
        if (this.accept(field)) {
            try {
                // Warning: Field.get(Object) creates wrapper objects for primitive types.
                Object fieldValue = this.getValue(field);
                this.append(fieldName, fieldValue);
            } catch (IllegalAccessException ex) {
                // This can't happen: setAccessible(true) above would raise a SecurityException
                // first. Throw an error in case the impossible happens, preserving the cause
                // (the original code discarded the exception and kept only its message).
                InternalError error = new InternalError("Unexpected IllegalAccessException: " + ex.getMessage());
                error.initCause(ex);
                throw error;
            }
        }
    }
}
/**
 * @return Returns the excludeFieldNames. Note: this is the internal (sorted) array itself,
 *         not a defensive copy, so callers must not modify it.
 */
public String[] getExcludeFieldNames() {
    return this.excludeFieldNames;
}
/**
 * Gets the last super class to stop appending fields for.
 *
 * @return The last super class to stop appending fields for, or <code>null</code> for no limit.
 */
public Class<?> getUpToClass() {
    return this.upToClass;
}
/**
 * Calls <code>java.lang.reflect.Field.get(Object)</code> on the object being built.
 *
 * @param field
 *            The Field to query.
 * @return The Object from the given Field.
 * @throws IllegalArgumentException
 *             see {@link java.lang.reflect.Field#get(Object)}
 * @throws IllegalAccessException
 *             see {@link java.lang.reflect.Field#get(Object)}
 * @see java.lang.reflect.Field#get(Object)
 */
protected Object getValue(Field field) throws IllegalArgumentException, IllegalAccessException {
    return field.get(this.getObject());
}
/**
 * Gets whether or not to append static fields.
 *
 * @return Whether or not to append static fields.
 * @since 2.1
 */
public boolean isAppendStatics() {
    return this.appendStatics;
}
/**
 * Gets whether or not to append transient fields.
 *
 * @return Whether or not to append transient fields.
 */
public boolean isAppendTransients() {
    return this.appendTransients;
}
/**
 * Append to the <code>toString</code> an <code>Object</code> array.
 *
 * <p>Delegates array rendering to the configured style; no field name is attached.</p>
 *
 * @param array
 *            the array to add to the <code>toString</code>
 * @return this
 */
public ToStringBuilder reflectionAppendArray(Object array) {
    this.getStyle().reflectionAppendArrayDetail(this.getStringBuffer(), null, array);
    return this;
}
/**
 * Sets whether or not to append static fields.
 *
 * @param appendStatics
 *            Whether or not to append static fields.
 * @since 2.1
 */
public void setAppendStatics(boolean appendStatics) {
    this.appendStatics = appendStatics;
}
/**
 * Sets whether or not to append transient fields.
 *
 * @param appendTransients
 *            Whether or not to append transient fields.
 */
public void setAppendTransients(boolean appendTransients) {
    this.appendTransients = appendTransients;
}
/**
 * Sets the field names to exclude.
 *
 * @param excludeFieldNamesParam
 *            The field names to exclude from toString, or <code>null</code> to exclude nothing.
 * @return <code>this</code>
 */
public ReflectionToStringBuilder setExcludeFieldNames(String[] excludeFieldNamesParam) {
    if (excludeFieldNamesParam == null) {
        this.excludeFieldNames = null;
        return this;
    }
    // Normalize away nulls, then sort so accept() can use Arrays.binarySearch.
    final String[] cleaned = toNoNullStringArray(excludeFieldNamesParam);
    Arrays.sort(cleaned);
    this.excludeFieldNames = cleaned;
    return this;
}
/**
 * Sets the last super class to stop appending fields for.
 *
 * @param clazz
 *            The last super class to stop appending fields for; when the wrapped object is
 *            non-null it must be an instance of <code>clazz</code>.
 * @throws IllegalArgumentException
 *             if the wrapped object is not an instance of <code>clazz</code>.
 */
public void setUpToClass(Class<?> clazz) {
    if (clazz != null) {
        Object object = getObject();
        // Replaced non-idiomatic "isInstance(object) == false" with a plain negation.
        if (object != null && !clazz.isInstance(object)) {
            throw new IllegalArgumentException("Specified class is not a superclass of the object");
        }
    }
    this.upToClass = clazz;
}
/**
 * Gets the String built by this builder.
 *
 * <p>Appends the fields declared on the object's own class first, then walks up the superclass
 * chain appending each level's fields, stopping after the configured "up to" class (which is
 * itself included) or when <code>Object</code> is reached.</p>
 *
 * @return the built string, or the style's null-text if the wrapped object is <code>null</code>.
 */
public String toString() {
    if (this.getObject() == null) {
        return this.getStyle().getNullText();
    }
    Class<?> clazz = this.getObject().getClass();
    this.appendFieldsIn(clazz);
    while (clazz.getSuperclass() != null && clazz != this.getUpToClass()) {
        clazz = clazz.getSuperclass();
        this.appendFieldsIn(clazz);
    }
    return super.toString();
}
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2014 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.go.util;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import com.thoughtworks.go.util.command.CommandLine;
import com.thoughtworks.go.util.command.CommandLineException;
import com.thoughtworks.go.util.command.ConsoleResult;
import com.thoughtworks.go.util.command.EnvironmentVariableContext;
import com.thoughtworks.go.util.command.InMemoryStreamConsumer;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;
import com.thoughtworks.go.util.command.SecretString;
import org.apache.commons.io.FileUtils;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests for ProcessWrapper: process liveness checks, console input piping, and command execution
 * via CommandLine. Several tests shell out to small Ruby helper scripts under
 * ../util/test-resources/executables, so they require a <code>ruby</code> interpreter on the PATH.
 */
public class ProcessWrapperTest {
    // Scratch directory recreated before every test and deleted afterwards.
    private File tempFolder;

    @Before
    public void setUp() throws Exception {
        tempFolder = FileUtil.createTempFolder();
        assertThat(tempFolder.exists(), is(true));
    }

    @After
    public void teardown() throws Exception {
        FileUtils.deleteQuietly(tempFolder);
    }

    @Test
    public void shouldReturnTrueWhenAProcessIsRunning() {
        Process process = getMockedProcess(mock(OutputStream.class));
        // Process.exitValue() throws IllegalThreadStateException while the process is still alive.
        when(process.exitValue()).thenThrow(new IllegalThreadStateException());
        ProcessWrapper processWrapper = new ProcessWrapper(process, "", "", inMemoryConsumer(), null, null);
        assertThat(processWrapper.isRunning(), is(true));
    }

    @Test
    public void shouldReturnFalseWhenAProcessHasExited() {
        Process process = getMockedProcess(mock(OutputStream.class));
        when(process.exitValue()).thenReturn(1);
        ProcessWrapper processWrapper = new ProcessWrapper(process, "", "", inMemoryConsumer(), null, null);
        assertThat(processWrapper.isRunning(), is(false));
    }

    @Test
    public void shouldTypeInputToConsole() {
        // A real in-memory stream (not a mock) so we can inspect what was written to stdin.
        OutputStream processInputStream = new ByteArrayOutputStream();
        Process process = getMockedProcess(processInputStream);
        ProcessWrapper processWrapper = new ProcessWrapper(process, "", "", inMemoryConsumer(), null, null);
        ArrayList<String> inputs = new ArrayList<String>();
        inputs.add("input1");
        inputs.add("input2");
        processWrapper.typeInputToConsole(inputs);
        String input = processInputStream.toString();
        // Split on either \n or \r\n so the assertion holds across platforms.
        String[] parts = input.split("\\r?\\n");
        assertThat(parts[0], is("input1"));
        assertThat(parts[1], is("input2"));
    }

    @Test
    public void shouldThrowExceptionWhenExecutableDoesNotExist() throws IOException {
        CommandLine line = CommandLine.createCommandLine("doesnotexist");
        try {
            ProcessOutputStreamConsumer outputStreamConsumer = inMemoryConsumer();
            line.execute(outputStreamConsumer, new EnvironmentVariableContext(), null);
            fail("Expected exception");
        } catch (CommandLineException e) {
            assertThat(e.getMessage(), containsString("Make sure this command can execute manually."));
            assertThat(e.getMessage(), containsString("doesnotexist"));
            // The exception should still carry a console result for diagnostics.
            assertThat(e.getResult(), notNullValue());
        }
    }

    @Test
    public void shouldTryCommandWithTimeout() throws IOException {
        CommandLine line = CommandLine.createCommandLine("doesnotexist");
        try {
            line.waitForSuccess(100);
            fail("Expected Exception");
        } catch (Exception e) {
            assertThat(e.getMessage(),
                    containsString("Timeout after 0.1 seconds waiting for command 'doesnotexist'"));
        }
    }

    @Test
    public void shouldCollectOutput() throws Exception {
        String output = "SYSOUT: Hello World!";
        String error = "SYSERR: Some error happened!";
        CommandLine line = CommandLine.createCommandLine("ruby").withArgs(script("echo"), output, error);
        ConsoleResult result = run(line);
        assertThat("Errors: " + result.errorAsString(), result.returnValue(), is(0));
        assertThat(result.output(), contains(output));
        assertThat(result.error(), contains(error));
    }

    // Resolves the relative path of a Ruby helper script by name.
    private String script(final String name) {
        return "../util/test-resources/executables/" + name + ".rb";
    }

    @Test
    public void shouldAcceptInputString() throws Exception {
        String input = "SYSIN: Hello World!";
        CommandLine line = CommandLine.createCommandLine("ruby").withArgs(script("echo-input"));
        ConsoleResult result = run(line, input);
        assertThat(result.output(), contains(input));
        assertThat(result.error().size(), is(0));
    }

    @Test
    public void shouldBeAbleToCompleteInput() throws Exception {
        String input1 = "SYSIN: Line 1!";
        String input2 = "SYSIN: Line 2!";
        CommandLine line = CommandLine.createCommandLine("ruby").withArgs(script("echo-all-input"));
        ConsoleResult result = run(line, input1, input2);
        assertThat(result.returnValue(), is(0));
        assertThat(result.output(), contains("You said: " + input1));
        assertThat(result.output(), contains("You said: " + input2));
        assertThat(result.error().size(), is(0));
    }

    @Test
    public void shouldReportReturnValueIfProcessFails() {
        CommandLine line = CommandLine.createCommandLine("ruby").withArgs(script("nonexistent-script"));
        ConsoleResult result = run(line);
        assertThat(result.returnValue(), is(1));
    }

    @Test
    public void shouldSetGoServerVariablesIfTheyExist() {
        // NOTE(review): this system property is never cleared, so it may leak into other tests.
        System.setProperty("GO_DEPENDENCY_LABEL_PIPELINE_NAME", "999");
        CommandLine line = CommandLine.createCommandLine("ruby").withArgs(script("dump-environment"));
        ConsoleResult result = run(line);
        assertThat("Errors: " + result.errorAsString(), result.returnValue(), is(0));
        assertThat(result.output(), contains("GO_DEPENDENCY_LABEL_PIPELINE_NAME=999"));
    }

    // Runs the command line, feeding the given inputs to its stdin, and waits for it to exit.
    private ConsoleResult run(CommandLine line, String... inputs) {
        InMemoryStreamConsumer outputStreamConsumer = inMemoryConsumer();
        EnvironmentVariableContext environmentVariableContext = new EnvironmentVariableContext();
        environmentVariableContext.setProperty("GO_DEPENDENCY_LABEL_PIPELINE_NAME", "999", false);
        line.addInput(inputs);
        ProcessWrapper processWrapper = line.execute(outputStreamConsumer, environmentVariableContext, null);
        return new ConsoleResult(processWrapper.waitForExit(),
                outputStreamConsumer.getStdLines(),
                outputStreamConsumer.getErrLines(), line.getArguments(), new ArrayList<SecretString>());
    }

    // Hamcrest matcher asserting that at least one line contains the given substring.
    private Matcher<List<String>> contains(final String output) {
        return new TypeSafeMatcher<List<String>>() {
            public boolean matchesSafely(List<String> lines) {
                for (String line : lines) {
                    if (line.contains(output)) {
                        return true;
                    }
                }
                return false;
            }

            public void describeTo(Description description) {
                description.appendText("to contain " + output);
            }
        };
    }

    // Builds a mocked Process whose stdout/stderr are mocks and whose stdin is the given stream.
    private Process getMockedProcess(OutputStream outputStream) {
        Process process = mock(Process.class);
        when(process.getErrorStream()).thenReturn(mock(InputStream.class));
        when(process.getInputStream()).thenReturn(mock(InputStream.class));
        when(process.getOutputStream()).thenReturn(outputStream);
        return process;
    }
}
| |
/*
* Licensed to ObjectStyle LLC under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ObjectStyle LLC licenses
* this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.bootique.test.junit;
import io.bootique.BQCoreModule;
import io.bootique.BQModuleOverrideBuilder;
import io.bootique.BQModuleProvider;
import io.bootique.Bootique;
import io.bootique.di.BQModule;
import io.bootique.log.BootLogger;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
* @since 0.20
*/
// parameterization is needed to enable covariant return types in subclasses
/**
 * A base class for test Bootique runtime builders. Parameterization with the concrete subclass
 * type enables covariant fluent returns in subclasses.
 *
 * @param <T> the concrete builder subclass, returned by the fluent methods.
 */
// parameterization is needed to enable covariant return types in subclasses
public abstract class BQTestRuntimeBuilder<T extends BQTestRuntimeBuilder<T>> {
    /**
     * Properties are used to make test stack independent from a shell environment.
     * It allows us be sure that all the vars are controlled within the test and there are no outside influences.
     * <p>
     * The properties take part on {@link io.bootique.env.Environment} provision in {@link BQCoreModule}.
     */
    private static final String EXCLUDE_SYSTEM_VARIABLES = "bq.core.excludeSystemVariables";
    private static final String EXCLUDE_SYSTEM_PROPERTIES = "bq.core.excludeSystemProperties";

    protected Bootique bootique;
    protected Map<String, String> properties;

    protected BQTestRuntimeBuilder(String[] args) {
        // Exclude system variables and properties so the outside environment cannot influence
        // the test. (Local renamed from "properties" to avoid shadowing the field.)
        HashMap<String, String> testProperties = new HashMap<>();
        testProperties.put(EXCLUDE_SYSTEM_PROPERTIES, "true");
        testProperties.put(EXCLUDE_SYSTEM_VARIABLES, "true");
        this.properties = testProperties;
        this.bootique = Bootique.app(args).module(createPropertiesProvider());
    }

    /**
     * Returns this builder cast to its concrete subclass type. The cast is safe by the
     * self-referential bound on {@code T}, assuming subclasses parameterize themselves with
     * their own type; centralizing it here replaces the unchecked casts previously scattered
     * through every fluent method.
     */
    @SuppressWarnings("unchecked")
    private T self() {
        return (T) this;
    }

    /**
     * Creates a module provider that contributes the test-controlled properties map to
     * {@link BQCoreModule}.
     */
    protected BQModuleProvider createPropertiesProvider() {
        return new BQModuleProvider() {
            @Override
            public BQModule module() {
                return binder -> BQCoreModule.extend(binder).setProperties(properties);
            }

            @Override
            public String name() {
                return "BQTestRuntimeBuilder:properties";
            }
        };
    }

    /**
     * Appends extra values to the test CLI arguments.
     *
     * @param args extra args to pass to Bootique.
     * @return this instance of test runtime builder.
     */
    public T args(String... args) {
        bootique.args(args);
        return self();
    }

    /**
     * Appends extra values to the test CLI arguments.
     *
     * @param args extra args to pass to Bootique.
     * @return this instance of test runtime builder.
     */
    public T args(Collection<String> args) {
        bootique.args(args);
        return self();
    }

    /**
     * Instructs Bootique to load any modules available on classpath that expose {@link io.bootique.BQModuleProvider}
     * provider. Auto-loaded modules will be used in default configuration. Factories within modules will of course be
     * configured dynamically from YAML.
     *
     * @return this instance of test runtime builder.
     */
    public T autoLoadModules() {
        bootique.autoLoadModules();
        return self();
    }

    /**
     * @param bootLogger custom BootLogger to use for a given runtime.
     * @return this instance of test runtime builder.
     * @since 0.23
     */
    public T bootLogger(BootLogger bootLogger) {
        bootique.bootLogger(bootLogger);
        return self();
    }

    /**
     * @param moduleType custom Module class to add to Bootique DI runtime.
     * @return this instance of test runtime builder.
     * @see #autoLoadModules()
     */
    public T module(Class<? extends BQModule> moduleType) {
        bootique.module(moduleType);
        return self();
    }

    /**
     * Adds an array of Module types to the Bootique DI runtime. Each type will
     * be instantiated by Bootique and added to the DI container.
     *
     * @param moduleTypes custom Module classes to add to Bootique DI runtime.
     * @return this instance of test runtime builder.
     * @see #autoLoadModules()
     */
    @SafeVarargs
    public final T modules(Class<? extends BQModule>... moduleTypes) {
        bootique.modules(moduleTypes);
        return self();
    }

    /**
     * @param m a module to add to the test runtime.
     * @return this instance of test runtime builder.
     */
    public T module(BQModule m) {
        bootique.module(m);
        return self();
    }

    /**
     * Adds an array of Modules to the Bootique DI runtime.
     *
     * @param modules an array of modules to add to Bootique DI runtime.
     * @return this instance of test runtime builder.
     */
    public T modules(BQModule... modules) {
        bootique.modules(modules);
        return self();
    }

    /**
     * Adds a Module generated by the provider. Provider may optionally specify
     * that the Module overrides services in some other Module.
     *
     * @param moduleProvider a provider of Module and override spec.
     * @return this instance of test runtime builder.
     */
    public T module(BQModuleProvider moduleProvider) {
        bootique.module(moduleProvider);
        return self();
    }

    /**
     * Starts an API call chain to override an array of Modules.
     *
     * @param overriddenTypes an array of modules whose bindings should be overridden.
     * @return {@link BQModuleOverrideBuilder} object to specify a Module
     * overriding other modules.
     */
    public BQModuleOverrideBuilder<T> override(Class<? extends BQModule>... overriddenTypes) {
        BQModuleOverrideBuilder<Bootique> subBuilder = bootique.override(overriddenTypes);
        return new BQModuleOverrideBuilder<T>() {
            @Override
            public T with(Class<? extends BQModule> moduleType) {
                subBuilder.with(moduleType);
                return self();
            }

            @Override
            public T with(BQModule module) {
                subBuilder.with(module);
                return self();
            }
        };
    }

    /**
     * Sets a single configuration property for the test runtime.
     *
     * @param key   the property key.
     * @param value the property value.
     * @return this instance of test runtime builder.
     */
    public T property(String key, String value) {
        properties.put(key, value);
        return self();
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
/*
* CompileIndexOperatorTest.java
*
* Created on March 23, 2005, 4:52 PM
*/
package com.gemstone.gemfire.cache.query.functional;
import com.gemstone.gemfire.cache.Region;
import java.util.ArrayList;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import com.gemstone.gemfire.cache.query.*;
import com.gemstone.gemfire.cache.query.data.Portfolio;
import java.util.HashMap;
/**
*
* @author vikramj
*/
/**
 * Tests the OQL index operator <code>$1[$2]</code> against strings, arrays, lists, maps and
 * regions, including null/UNDEFINED index handling and unsupported argument combinations.
 *
 * @author vikramj
 */
public class IndexOperatorTest extends TestCase {
    public IndexOperatorTest(String testName) {
        super(testName);
    }

    protected void setUp() throws Exception {
        CacheUtils.startCache();
    }

    protected void tearDown() throws Exception {
        CacheUtils.closeCache();
    }

    public static Test suite() {
        TestSuite suite = new TestSuite(IndexOperatorTest.class);
        return suite;
    }

    // str[i] yields the character at index i.
    public void testWithString() throws Exception {
        String str = "xyz";
        Character c = (Character) runQuery(str, 0);
        if (c.charValue() != 'x')
            fail();
        Character d = (Character) runQuery(str, 2);
        if (d.charValue() != 'z')
            fail();
    }

    // arr[i] yields element i for String, primitive int and Object arrays alike.
    public void testWithArray() throws Exception {
        Object result = null;
        int index = 1;
        String stringArray[] = {"a", "b"};
        result = runQuery(stringArray, index);
        if (result == null || !stringArray[index].equals(result))
            fail("failed for String array");
        int intArray[] = {1, 2};
        result = runQuery(intArray, index);
        if (result == null || intArray[index] != ((Integer) result).intValue())
            fail("failed for int array");
        Object objectArray[] = {"a", "b"};
        result = runQuery(objectArray, index);
        if (result == null || !objectArray[index].equals(result))
            fail("failed for String array");
    }

    // list[i] yields list.get(i).
    public void testWithList() throws Exception {
        ArrayList list = new ArrayList();
        list.add("aa");
        list.add("bb");
        Object result = null;
        int index = 1;
        result = runQuery(list, index);
        if (result == null || !list.get(index).equals(result))
            fail("failed for List");
    }

    // map[key] yields map.get(key).
    public void testWithMap() throws Exception {
        HashMap map = new HashMap();
        map.put("0", new Integer(11));
        map.put("1", new Integer(12));
        Object result = null;
        Object index = "1";
        result = runQuery(map, index);
        if (result == null || !map.get(index).equals(result))
            fail("failed for Map");
    }

    // region[key] yields region.get(key).
    public void testWithRegion() throws Exception {
        Region region = CacheUtils.createRegion("Portfolio", Portfolio.class);
        for (int i = 0; i < 5; i++) {
            region.put("" + i, new Portfolio(i));
        }
        Object result = null;
        Object index = "2";
        result = runQuery(region, index);
        if (result == null || !region.get(index).equals(result))
            fail("failed for Region");
    }

    // Nested indexing: $1[0][0] indexes into the string produced by the first index.
    public void testIndexOfIndex() throws Exception {
        String array[] = {"abc", "def"};
        Query q = CacheUtils.getQueryService().newQuery("$1[0][0]");
        Object params[] = {array, new Integer(0)};
        Character result = (Character) q.execute(params);
        System.out.println(Utils.printResult(result));
        if (result == null || result.charValue() != 'a')
            fail();
    }

    // A null receiver is tolerated; a null index on an array is a type mismatch,
    // while a null index on a map simply yields null.
    public void testWithNULL() throws Exception {
        runQuery(null, 0);
        runQuery(null, null);
        Object objectArray[] = {"a", "b"};
        try {
            runQuery(objectArray, null);
            fail();
        } catch (TypeMismatchException e) {
        }
        HashMap map = new HashMap();
        map.put("0", new Integer(11));
        map.put("1", new Integer(12));
        Object result = runQuery(map, null);
        if (result != null)
            fail();
    }

    // UNDEFINED as receiver is tolerated; UNDEFINED as an array index is a type mismatch,
    // while UNDEFINED as a map key simply yields null.
    public void testWithUNDEFINED() throws Exception {
        try {
            runQuery(QueryService.UNDEFINED, 0);
        } catch (TypeMismatchException e) {
            fail();
        }
        try {
            runQuery(QueryService.UNDEFINED, QueryService.UNDEFINED);
        } catch (TypeMismatchException e) {
            fail();
        }
        Object objectArray[] = {"a", "b"};
        try {
            runQuery(objectArray, QueryService.UNDEFINED);
            fail();
        } catch (TypeMismatchException e) {
        }
        HashMap map = new HashMap();
        map.put("0", new Integer(11));
        map.put("1", new Integer(12));
        Object result = runQuery(map, QueryService.UNDEFINED);
        if (result != null)
            fail();
    }

    // Non-indexable receivers and non-index arguments must raise TypeMismatchException.
    public void testWithUnsupportedArgs() throws Exception {
        try {
            runQuery("a", "a");
            fail();
        } catch (TypeMismatchException e) {
        }
        try {
            runQuery(new Object(), 0);
            fail();
        } catch (TypeMismatchException e) {
        }
        try {
            Object objectArray[] = {"a", "b"};
            runQuery(objectArray, new Object());
            fail();
        } catch (TypeMismatchException e) {
        }
    }

    // Executes "$1[$2]" with an arbitrary Object index and prints the result.
    public Object runQuery(Object array, Object index) throws Exception {
        Query q = CacheUtils.getQueryService().newQuery("$1[$2]");
        Object params[] = {array, index};
        Object result = q.execute(params);
        System.out.println(Utils.printResult(result));
        return result;
    }

    // Executes "$1[$2]" with an int index (boxed) and prints the result.
    public Object runQuery(Object array, int index) throws Exception {
        Query q = CacheUtils.getQueryService().newQuery("$1[$2]");
        Object params[] = {array, new Integer(index)};
        Object result = q.execute(params);
        System.out.println(Utils.printResult(result));
        return result;
    }
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import com.google.api.core.BetaApi;
import com.google.api.gax.httpjson.ApiMessage;
import java.util.List;
import java.util.Objects;
import javax.annotation.Generated;
import javax.annotation.Nullable;
@Generated("by GAPIC")
@BetaApi
/**
* Request object for method compute.projects.enableXpnResource. Enable service resource (a.k.a
* service project) for a host project, so that subnets in the host project can be used by instances
* in the service project.
*/
public final class EnableXpnResourceProjectHttpRequest implements ApiMessage {
private final String access_token;
private final String callback;
private final String fields;
private final String key;
private final String prettyPrint;
private final String project;
private final ProjectsEnableXpnResourceRequest projectsEnableXpnResourceRequestResource;
private final String quotaUser;
private final String requestId;
private final String userIp;
private EnableXpnResourceProjectHttpRequest() {
this.access_token = null;
this.callback = null;
this.fields = null;
this.key = null;
this.prettyPrint = null;
this.project = null;
this.projectsEnableXpnResourceRequestResource = null;
this.quotaUser = null;
this.requestId = null;
this.userIp = null;
}
private EnableXpnResourceProjectHttpRequest(
String access_token,
String callback,
String fields,
String key,
String prettyPrint,
String project,
ProjectsEnableXpnResourceRequest projectsEnableXpnResourceRequestResource,
String quotaUser,
String requestId,
String userIp) {
this.access_token = access_token;
this.callback = callback;
this.fields = fields;
this.key = key;
this.prettyPrint = prettyPrint;
this.project = project;
this.projectsEnableXpnResourceRequestResource = projectsEnableXpnResourceRequestResource;
this.quotaUser = quotaUser;
this.requestId = requestId;
this.userIp = userIp;
}
@Override
public Object getFieldValue(String fieldName) {
if ("access_token".equals(fieldName)) {
return access_token;
}
if ("callback".equals(fieldName)) {
return callback;
}
if ("fields".equals(fieldName)) {
return fields;
}
if ("key".equals(fieldName)) {
return key;
}
if ("prettyPrint".equals(fieldName)) {
return prettyPrint;
}
if ("project".equals(fieldName)) {
return project;
}
if ("projectsEnableXpnResourceRequestResource".equals(fieldName)) {
return projectsEnableXpnResourceRequestResource;
}
if ("quotaUser".equals(fieldName)) {
return quotaUser;
}
if ("requestId".equals(fieldName)) {
return requestId;
}
if ("userIp".equals(fieldName)) {
return userIp;
}
return null;
}
@Nullable
@Override
public ProjectsEnableXpnResourceRequest getApiMessageRequestBody() {
return projectsEnableXpnResourceRequestResource;
}
@Nullable
@Override
/**
* The fields that should be serialized (even if they have empty values). If the containing
* message object has a non-null fieldmask, then all the fields in the field mask (and only those
* fields in the field mask) will be serialized. If the containing object does not have a
* fieldmask, then only non-empty fields will be serialized.
*/
public List<String> getFieldMask() {
return null;
}
/** OAuth 2.0 token for the current user. */
public String getAccessToken() {
return access_token;
}
/** Name of the JavaScript callback function that handles the response. */
public String getCallback() {
return callback;
}
/** Selector specifying a subset of fields to include in the response. */
public String getFields() {
return fields;
}
/** API key. Required unless you provide an OAuth 2.0 token. */
public String getKey() {
return key;
}
/** Returns response with indentations and line breaks. */
public String getPrettyPrint() {
return prettyPrint;
}
/**
 * Project ID for this request. It must have the format `{project}/enableXpnResource`.
 * \`{project}\` must start with a letter, and contain only letters (\`[A-Za-z]\`), numbers
 * (\`[0-9]\`), dashes (\`-\`), * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus
 * (\`+\`) or percent * signs (\`%\`). It must be between 3 and 255 characters in length, and
 * it * must not start with \`"goog"\`.
 */
public String getProject() {
return project;
}
/** The {@link ProjectsEnableXpnResourceRequest} sent as the API message request body (may be null). */
public ProjectsEnableXpnResourceRequest getProjectsEnableXpnResourceRequestResource() {
return projectsEnableXpnResourceRequestResource;
}
/** Alternative to userIp. */
public String getQuotaUser() {
return quotaUser;
}
/**
 * An optional request ID to identify requests. Specify a unique request ID so that if you must
 * retry your request, the server will know to ignore the request if it has already been
 * completed.
 *
 * <p>For example, consider a situation where you make an initial request and the request times
 * out. If you make the request again with the same request ID, the server can check if original
 * operation with the same request ID was received, and if so, will ignore the second request.
 * This prevents clients from accidentally creating duplicate commitments.
 *
 * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
 * (00000000-0000-0000-0000-000000000000).
 */
public String getRequestId() {
return requestId;
}
/** IP address of the end user for whom the API call is being made. */
public String getUserIp() {
return userIp;
}
/** Returns a new builder with all fields unset. */
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
/** Returns a new builder pre-populated with the non-null fields of {@code prototype}. */
public static Builder newBuilder(EnableXpnResourceProjectHttpRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
/** Returns a builder seeded from this instance; an empty builder when called on the default instance. */
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
/** Returns the shared default instance (all fields unset). */
public static EnableXpnResourceProjectHttpRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton default instance, created once in the static initializer below.
private static final EnableXpnResourceProjectHttpRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new EnableXpnResourceProjectHttpRequest();
}
/**
 * Mutable builder for {@link EnableXpnResourceProjectHttpRequest}. All fields are optional
 * except {@code project}, which is validated in {@link #build()}.
 */
public static class Builder {
private String access_token;
private String callback;
private String fields;
private String key;
private String prettyPrint;
private String project;
private ProjectsEnableXpnResourceRequest projectsEnableXpnResourceRequestResource;
private String quotaUser;
private String requestId;
private String userIp;
Builder() {}
/**
 * Copies every non-null field from {@code other} into this builder; null fields in
 * {@code other} leave the builder's current values untouched. Merging from the default
 * instance is a no-op.
 */
public Builder mergeFrom(EnableXpnResourceProjectHttpRequest other) {
if (other == EnableXpnResourceProjectHttpRequest.getDefaultInstance()) return this;
if (other.getAccessToken() != null) {
this.access_token = other.access_token;
}
if (other.getCallback() != null) {
this.callback = other.callback;
}
if (other.getFields() != null) {
this.fields = other.fields;
}
if (other.getKey() != null) {
this.key = other.key;
}
if (other.getPrettyPrint() != null) {
this.prettyPrint = other.prettyPrint;
}
if (other.getProject() != null) {
this.project = other.project;
}
if (other.getProjectsEnableXpnResourceRequestResource() != null) {
this.projectsEnableXpnResourceRequestResource =
other.projectsEnableXpnResourceRequestResource;
}
if (other.getQuotaUser() != null) {
this.quotaUser = other.quotaUser;
}
if (other.getRequestId() != null) {
this.requestId = other.requestId;
}
if (other.getUserIp() != null) {
this.userIp = other.userIp;
}
return this;
}
/** Copies all fields (including nulls, unlike mergeFrom) from {@code source}. */
Builder(EnableXpnResourceProjectHttpRequest source) {
this.access_token = source.access_token;
this.callback = source.callback;
this.fields = source.fields;
this.key = source.key;
this.prettyPrint = source.prettyPrint;
this.project = source.project;
this.projectsEnableXpnResourceRequestResource =
source.projectsEnableXpnResourceRequestResource;
this.quotaUser = source.quotaUser;
this.requestId = source.requestId;
this.userIp = source.userIp;
}
/** OAuth 2.0 token for the current user. */
public String getAccessToken() {
return access_token;
}
/** OAuth 2.0 token for the current user. */
public Builder setAccessToken(String access_token) {
this.access_token = access_token;
return this;
}
/** Name of the JavaScript callback function that handles the response. */
public String getCallback() {
return callback;
}
/** Name of the JavaScript callback function that handles the response. */
public Builder setCallback(String callback) {
this.callback = callback;
return this;
}
/** Selector specifying a subset of fields to include in the response. */
public String getFields() {
return fields;
}
/** Selector specifying a subset of fields to include in the response. */
public Builder setFields(String fields) {
this.fields = fields;
return this;
}
/** API key. Required unless you provide an OAuth 2.0 token. */
public String getKey() {
return key;
}
/** API key. Required unless you provide an OAuth 2.0 token. */
public Builder setKey(String key) {
this.key = key;
return this;
}
/** Returns response with indentations and line breaks. */
public String getPrettyPrint() {
return prettyPrint;
}
/** Returns response with indentations and line breaks. */
public Builder setPrettyPrint(String prettyPrint) {
this.prettyPrint = prettyPrint;
return this;
}
/**
 * Project ID for this request. It must have the format `{project}/enableXpnResource`.
 * \`{project}\` must start with a letter, and contain only letters (\`[A-Za-z]\`), numbers
 * (\`[0-9]\`), dashes (\`-\`), * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus
 * (\`+\`) or percent * signs (\`%\`). It must be between 3 and 255 characters in length,
 * and it * must not start with \`"goog"\`.
 */
public String getProject() {
return project;
}
/**
 * Project ID for this request. It must have the format `{project}/enableXpnResource`.
 * \`{project}\` must start with a letter, and contain only letters (\`[A-Za-z]\`), numbers
 * (\`[0-9]\`), dashes (\`-\`), * underscores (\`_\`), periods (\`.\`), tildes (\`~\`), plus
 * (\`+\`) or percent * signs (\`%\`). It must be between 3 and 255 characters in length,
 * and it * must not start with \`"goog"\`.
 */
public Builder setProject(String project) {
this.project = project;
return this;
}
/** The {@link ProjectsEnableXpnResourceRequest} sent as the API message request body. */
public ProjectsEnableXpnResourceRequest getProjectsEnableXpnResourceRequestResource() {
return projectsEnableXpnResourceRequestResource;
}
/** Sets the {@link ProjectsEnableXpnResourceRequest} sent as the API message request body. */
public Builder setProjectsEnableXpnResourceRequestResource(
ProjectsEnableXpnResourceRequest projectsEnableXpnResourceRequestResource) {
this.projectsEnableXpnResourceRequestResource = projectsEnableXpnResourceRequestResource;
return this;
}
/** Alternative to userIp. */
public String getQuotaUser() {
return quotaUser;
}
/** Alternative to userIp. */
public Builder setQuotaUser(String quotaUser) {
this.quotaUser = quotaUser;
return this;
}
/**
 * An optional request ID to identify requests. Specify a unique request ID so that if you must
 * retry your request, the server will know to ignore the request if it has already been
 * completed.
 *
 * <p>For example, consider a situation where you make an initial request and the request times
 * out. If you make the request again with the same request ID, the server can check if original
 * operation with the same request ID was received, and if so, will ignore the second request.
 * This prevents clients from accidentally creating duplicate commitments.
 *
 * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
 * (00000000-0000-0000-0000-000000000000).
 */
public String getRequestId() {
return requestId;
}
/**
 * An optional request ID to identify requests. Specify a unique request ID so that if you must
 * retry your request, the server will know to ignore the request if it has already been
 * completed.
 *
 * <p>For example, consider a situation where you make an initial request and the request times
 * out. If you make the request again with the same request ID, the server can check if original
 * operation with the same request ID was received, and if so, will ignore the second request.
 * This prevents clients from accidentally creating duplicate commitments.
 *
 * <p>The request ID must be a valid UUID with the exception that zero UUID is not supported
 * (00000000-0000-0000-0000-000000000000).
 */
public Builder setRequestId(String requestId) {
this.requestId = requestId;
return this;
}
/** IP address of the end user for whom the API call is being made. */
public String getUserIp() {
return userIp;
}
/** IP address of the end user for whom the API call is being made. */
public Builder setUserIp(String userIp) {
this.userIp = userIp;
return this;
}
/**
 * Builds the request.
 *
 * @throws IllegalStateException if the required {@code project} field is unset
 */
public EnableXpnResourceProjectHttpRequest build() {
String missing = "";
if (project == null) {
missing += " project";
}
if (!missing.isEmpty()) {
throw new IllegalStateException("Missing required properties:" + missing);
}
return new EnableXpnResourceProjectHttpRequest(
access_token,
callback,
fields,
key,
prettyPrint,
project,
projectsEnableXpnResourceRequestResource,
quotaUser,
requestId,
userIp);
}
/**
 * Returns an independent field-by-field copy of this builder.
 * NOTE(review): hides {@link Object#clone()} without implementing Cloneable and never
 * calls super.clone() — confirm this naming is intentional (a copy constructor would be
 * more conventional).
 */
public Builder clone() {
Builder newBuilder = new Builder();
newBuilder.setAccessToken(this.access_token);
newBuilder.setCallback(this.callback);
newBuilder.setFields(this.fields);
newBuilder.setKey(this.key);
newBuilder.setPrettyPrint(this.prettyPrint);
newBuilder.setProject(this.project);
newBuilder.setProjectsEnableXpnResourceRequestResource(
this.projectsEnableXpnResourceRequestResource);
newBuilder.setQuotaUser(this.quotaUser);
newBuilder.setRequestId(this.requestId);
newBuilder.setUserIp(this.userIp);
return newBuilder;
}
}
/** Renders all fields as {@code name=value} pairs; null fields print as "null". */
@Override
public String toString() {
StringBuilder sb = new StringBuilder("EnableXpnResourceProjectHttpRequest{");
sb.append("access_token=").append(access_token);
sb.append(", callback=").append(callback);
sb.append(", fields=").append(fields);
sb.append(", key=").append(key);
sb.append(", prettyPrint=").append(prettyPrint);
sb.append(", project=").append(project);
sb.append(", projectsEnableXpnResourceRequestResource=").append(projectsEnableXpnResourceRequestResource);
sb.append(", quotaUser=").append(quotaUser);
sb.append(", requestId=").append(requestId);
sb.append(", userIp=").append(userIp);
sb.append("}");
return sb.toString();
}
/** Two requests are equal when every one of their ten fields is equal (null-safe). */
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
if (!(o instanceof EnableXpnResourceProjectHttpRequest)) {
return false;
}
EnableXpnResourceProjectHttpRequest other = (EnableXpnResourceProjectHttpRequest) o;
return Objects.equals(this.access_token, other.getAccessToken())
&& Objects.equals(this.callback, other.getCallback())
&& Objects.equals(this.fields, other.getFields())
&& Objects.equals(this.key, other.getKey())
&& Objects.equals(this.prettyPrint, other.getPrettyPrint())
&& Objects.equals(this.project, other.getProject())
&& Objects.equals(
this.projectsEnableXpnResourceRequestResource,
other.getProjectsEnableXpnResourceRequestResource())
&& Objects.equals(this.quotaUser, other.getQuotaUser())
&& Objects.equals(this.requestId, other.getRequestId())
&& Objects.equals(this.userIp, other.getUserIp());
}
@Override
public int hashCode() {
// Hashes exactly the fields that equals() compares, preserving the equals/hashCode contract.
return Objects.hash(
access_token,
callback,
fields,
key,
prettyPrint,
project,
projectsEnableXpnResourceRequestResource,
quotaUser,
requestId,
userIp);
}
}
| |
package com.hubspot.singularity;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.hubspot.mesos.JavaUtils;
/**
 * Helpers for expanding Singularity S3 key format strings. A format string contains
 * placeholders (%requestId, %deployId, %tag, %host, %taskId, %Y, %m, %d, %s, %index,
 * %filename, %fileext, %guid) that are substituted with concrete values, plus
 * prefix generation for date-ranged S3 searches.
 */
public class SingularityS3FormatHelper {

  // Placeholders that cannot be resolved at the corresponding scope; formats are
  // truncated at the first occurrence of any of these (see trimKeyFormat).
  private static final List<String> DISALLOWED_FOR_TASK = ImmutableList.of("%index", "%s", "%filename", "%fileext");
  private static final List<String> DISALLOWED_FOR_DEPLOY = ImmutableList.copyOf(Iterables.concat(DISALLOWED_FOR_TASK, ImmutableList.of("%host")));
  private static final List<String> DISALLOWED_FOR_REQUEST = ImmutableList.copyOf(Iterables.concat(DISALLOWED_FOR_DEPLOY, ImmutableList.of("%tag", "%deployId")));

  /** Substitutes %requestId in the format string. */
  public static String getS3KeyFormat(String s3KeyFormat, String requestId) {
    s3KeyFormat = s3KeyFormat.replace("%requestId", requestId);
    return s3KeyFormat;
  }

  /** Substitutes %requestId, %tag (empty string when absent) and %deployId. */
  public static String getS3KeyFormat(String s3KeyFormat, String requestId, String deployId, Optional<String> loggingTag) {
    s3KeyFormat = getS3KeyFormat(s3KeyFormat, requestId);
    s3KeyFormat = s3KeyFormat.replace("%tag", loggingTag.or(""));
    s3KeyFormat = s3KeyFormat.replace("%deployId", deployId);
    return s3KeyFormat;
  }

  /** Substitutes request/deploy placeholders plus %host and %taskId from the task id. */
  public static String getS3KeyFormat(String s3KeyFormat, SingularityTaskId taskId, Optional<String> loggingTag) {
    s3KeyFormat = getS3KeyFormat(s3KeyFormat, taskId.getRequestId(), taskId.getDeployId(), loggingTag);
    s3KeyFormat = s3KeyFormat.replace("%host", taskId.getSanitizedHost());
    s3KeyFormat = s3KeyFormat.replace("%taskId", taskId.toString());
    return s3KeyFormat;
  }

  /**
   * Resolves the per-file placeholders (%filename, %fileext, %guid, %host, date parts,
   * %s timestamp, %index sequence number) against a concrete upload.
   */
  public static String getKey(String s3KeyFormat, int sequence, long timestamp, String filename, Optional<String> hostname) {
    final Calendar calendar = Calendar.getInstance();
    calendar.setTimeInMillis(timestamp);
    if (s3KeyFormat.contains("%filename")) {
      s3KeyFormat = s3KeyFormat.replace("%filename", filename);
    }
    if (s3KeyFormat.contains("%fileext")) {
      int lastPeriod = filename.lastIndexOf(".");
      if (lastPeriod > -1) {
        // Substring includes the dot itself, e.g. ".log". A name with no dot leaves
        // %fileext unresolved (original behavior preserved).
        s3KeyFormat = s3KeyFormat.replace("%fileext", filename.substring(lastPeriod));
      }
    }
    if (s3KeyFormat.contains("%guid")) {
      s3KeyFormat = s3KeyFormat.replace("%guid", UUID.randomUUID().toString());
    }
    if (s3KeyFormat.contains("%host")) {
      s3KeyFormat = s3KeyFormat.replace("%host", hostname.or(JavaUtils.getHostName().or("unknownhost")));
    }
    if (s3KeyFormat.contains("%Y")) {
      s3KeyFormat = s3KeyFormat.replace("%Y", getYear(calendar.get(Calendar.YEAR)));
    }
    if (s3KeyFormat.contains("%m")) {
      s3KeyFormat = s3KeyFormat.replace("%m", getDayOrMonth(getMonth(calendar)));
    }
    if (s3KeyFormat.contains("%d")) {
      s3KeyFormat = s3KeyFormat.replace("%d", getDayOrMonth(calendar.get(Calendar.DAY_OF_MONTH)));
    }
    if (s3KeyFormat.contains("%s")) {
      s3KeyFormat = s3KeyFormat.replace("%s", Long.toString(timestamp));
    }
    if (s3KeyFormat.contains("%index")) {
      s3KeyFormat = s3KeyFormat.replace("%index", Integer.toString(sequence));
    }
    return s3KeyFormat;
  }

  /**
   * Truncates the format at the first occurrence of any disallowed key; returns it
   * unchanged when none is present.
   */
  public static String trimKeyFormat(String s3KeyFormat, List<String> disallowedKeys) {
    int lowestIndex = s3KeyFormat.length();
    for (String disallowedKey : disallowedKeys) {
      int index = s3KeyFormat.indexOf(disallowedKey);
      if (index != -1 && index < lowestIndex) {
        lowestIndex = index;
      }
    }
    // lowestIndex starts at length() and only ever decreases, so the original
    // "== -1" guard was dead code; this fast path replaces it (same result either way).
    if (lowestIndex == s3KeyFormat.length()) {
      return s3KeyFormat;
    }
    return s3KeyFormat.substring(0, lowestIndex);
  }

  /** Calendar.MONTH is zero-based; convert to the human 1-12 range. */
  private static int getMonth(Calendar calendar) {
    return calendar.get(Calendar.MONTH) + 1;
  }

  private static String getYear(int year) {
    return Integer.toString(year);
  }

  /** Zero-pads day/month values to two digits (e.g. 3 -> "03"). */
  private static String getDayOrMonth(int value) {
    return String.format("%02d", value);
  }

  /** S3 key prefixes to search for a deploy within [start, end). */
  public static Collection<String> getS3KeyPrefixes(String s3KeyFormat, String requestId, String deployId, Optional<String> tag, long start, long end) {
    String keyFormat = getS3KeyFormat(s3KeyFormat, requestId, deployId, tag);
    keyFormat = trimTaskId(keyFormat, requestId + "-" + deployId);
    return getS3KeyPrefixes(keyFormat, DISALLOWED_FOR_DEPLOY, start, end);
  }

  /** Replaces everything from %taskId onward with the given literal prefix. */
  private static String trimTaskId(String s3KeyFormat, String replaceWith) {
    int index = s3KeyFormat.indexOf("%taskId");
    if (index > -1) {
      s3KeyFormat = s3KeyFormat.substring(0, index) + replaceWith;
    }
    return s3KeyFormat;
  }

  /** S3 key prefixes to search for a request within [start, end). */
  public static Collection<String> getS3KeyPrefixes(String s3KeyFormat, String requestId, long start, long end) {
    s3KeyFormat = getS3KeyFormat(s3KeyFormat, requestId);
    s3KeyFormat = trimTaskId(s3KeyFormat, requestId);
    return getS3KeyPrefixes(s3KeyFormat, DISALLOWED_FOR_REQUEST, start, end);
  }

  /**
   * Expands the first %Y/%m/%d occurrences over every day/month/year in [start, end),
   * producing the set of concrete key prefixes to search. Returns a singleton when no
   * date placeholder is present.
   */
  private static Collection<String> getS3KeyPrefixes(String s3KeyFormat, List<String> disallowedKeys, long start, long end) {
    String trimKeyFormat = trimKeyFormat(s3KeyFormat, disallowedKeys);
    int indexOfY = trimKeyFormat.indexOf("%Y");
    int indexOfM = trimKeyFormat.indexOf("%m");
    int indexOfD = trimKeyFormat.indexOf("%d");
    if (indexOfY == -1 && indexOfM == -1 && indexOfD == -1) {
      return Collections.singleton(trimKeyFormat);
    }
    if (indexOfY > -1) {
      trimKeyFormat = trimKeyFormat.replace("%Y", "YYYY");
      // "%Y" (2 chars) was widened to "YYYY" (4 chars), so only placeholders located
      // AFTER %Y shift right by 2. The original shifted %m/%d unconditionally, which
      // corrupted offsets for formats where %m or %d preceded %Y (e.g. "%m-%Y").
      if (indexOfM > indexOfY) {
        indexOfM += 2;
      }
      if (indexOfD > indexOfY) {
        indexOfD += 2;
      }
    }
    StringBuilder keyBuilder = new StringBuilder(trimKeyFormat);
    Set<String> keyPrefixes = Sets.newHashSet();
    Calendar calendar = Calendar.getInstance();
    calendar.setTimeInMillis(start);
    // Truncate to midnight so the first prefix covers the whole day containing start.
    calendar.set(Calendar.SECOND, 0);
    calendar.set(Calendar.MILLISECOND, 0);
    calendar.set(Calendar.MINUTE, 0);
    calendar.set(Calendar.HOUR_OF_DAY, 0);
    while (calendar.getTimeInMillis() < end) {
      // Overwrite the placeholder slots in place; replacements have the same width
      // (4-digit year, 2-digit month/day), so the indices stay valid across iterations.
      if (indexOfY > -1) {
        keyBuilder.replace(indexOfY, indexOfY + 4, getYear(calendar.get(Calendar.YEAR)));
      }
      if (indexOfM > -1) {
        keyBuilder.replace(indexOfM, indexOfM + 2, getDayOrMonth(getMonth(calendar)));
      }
      if (indexOfD > -1) {
        keyBuilder.replace(indexOfD, indexOfD + 2, getDayOrMonth(calendar.get(Calendar.DAY_OF_MONTH)));
      }
      keyPrefixes.add(keyBuilder.toString());
      // Step by the finest date unit present in the format.
      if (indexOfD > -1) {
        calendar.add(Calendar.DAY_OF_YEAR, 1);
      } else if (indexOfM > -1) {
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        calendar.add(Calendar.MONTH, 1);
      } else {
        calendar.set(Calendar.MONTH, 0);
        calendar.add(Calendar.YEAR, 1);
      }
    }
    return keyPrefixes;
  }

  /** S3 key prefixes to search for a single task within [start, end). */
  public static Collection<String> getS3KeyPrefixes(String s3KeyFormat, SingularityTaskId taskId, Optional<String> tag, long start, long end) {
    String keyFormat = getS3KeyFormat(s3KeyFormat, taskId, tag);
    return getS3KeyPrefixes(keyFormat, DISALLOWED_FOR_TASK, start, end);
  }
}
| |
/*
* Copyright 2013 Guidewire Software, Inc.
*/
package gw.internal.gosu.module.fs;
import gw.config.BaseService;
import gw.config.CommonServices;
import gw.fs.FileFactory;
import gw.fs.IDirectory;
import gw.fs.IFile;
import gw.fs.IResource;
import gw.fs.jar.JarFileDirectoryImpl;
import gw.fs.url.URLFileImpl;
import gw.lang.reflect.module.IFileSystem;
import gw.lang.reflect.module.IModule;
import gw.lang.reflect.module.IProtocolAdapter;
import gw.util.GosuStringUtil;
import java.io.File;
import java.io.IOException;
import java.net.JarURLConnection;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.jar.JarFile;
/**
 * Default {@link IFileSystem} implementation backed by java.io.File, with a process-wide
 * cache of IDirectory wrappers keyed by normalized File, plus URL-based resource lookup
 * for the file/jar/http protocols and pluggable protocol adapters via ServiceLoader.
 */
public class FileSystemImpl extends BaseService implements IFileSystem {
// Cache of directory wrappers; all access is guarded by CACHED_FILE_SYSTEM_LOCK.
private Map<File, IDirectory> _cachedDirInfo;
private CachingMode _cachingMode;
private FileSystemImpl.IDirectoryResourceExtractor _iDirectoryResourceExtractor;
private FileSystemImpl.IFileResourceExtractor _iFileResourceExtractor;
// Maps URL protocol name (e.g. "jar") to the adapter handling it; concurrent for lock-free reads.
private Map<String, IProtocolAdapter> _protocolAdapters;
// When true, all lookups delegate to the newer FileFactory API instead of the local cache.
public static boolean USE_NEW_API = false;
// Really gross, non-granular synchronization, but in general we shouldn't
// be hitting this cache much after startup anyway, so it ought to not
// turn into a perf issue
static final Object CACHED_FILE_SYSTEM_LOCK = new Object();
public FileSystemImpl(CachingMode cachingMode) {
_cachedDirInfo = new HashMap<File, IDirectory>();
_cachingMode = cachingMode;
_iDirectoryResourceExtractor = new IDirectoryResourceExtractor();
_iFileResourceExtractor = new IFileResourceExtractor();
_protocolAdapters = new ConcurrentHashMap<String, IProtocolAdapter>();
loadProtocolAdapters();
}
/** Returns a cached IDirectory wrapper for the (normalized) directory, creating it on first use. */
@Override
public IDirectory getIDirectory(File dir) {
if (USE_NEW_API) {
return FileFactory.instance().getIDirectory(dir);
}
if (dir == null) {
return null;
}
dir = normalizeFile(dir);
synchronized (CACHED_FILE_SYSTEM_LOCK) {
IDirectory directory = _cachedDirInfo.get(dir);
if (directory == null) {
directory = createDir( dir );
_cachedDirInfo.put( dir, directory );
}
return directory;
}
}
/** Returns an IFile wrapper for the (normalized) file; files are not cached, only directories are. */
@Override
public IFile getIFile(File file) {
if (USE_NEW_API) {
return FileFactory.instance().getIFile(file);
}
if (file == null) {
return null;
} else {
return new JavaFileImpl( normalizeFile( file ) );
}
}
/**
 * Collapses "." and ".." segments out of the file's absolute path, accepting both '/'
 * and '\' as separators. Returns the original File unchanged when no segment needed
 * collapsing.
 * NOTE(review): a ".." that would climb above the first component calls
 * components.remove(-1) and throws IndexOutOfBoundsException — confirm inputs can
 * never underflow, or guard the remove.
 */
public static File normalizeFile(File file) {
// return file;
String absolutePath = file.getAbsolutePath();
List<String> components = new ArrayList<String>();
boolean reallyNormalized = false;
int lastIndex = 0;
for (int i = 0; i < absolutePath.length(); i++) {
char c = absolutePath.charAt(i);
if (c == '/' || c == '\\') {
String component = absolutePath.substring(lastIndex, i);
if (component.equals(".")) {
reallyNormalized = true;
} else if (component.equals("..")) {
components.remove(components.size() - 1);
reallyNormalized = true;
} else {
components.add(component);
}
lastIndex = i + 1;
}
}
// Handle the trailing component after the last separator.
String component = absolutePath.substring(lastIndex);
if (component.equals(".")) {
reallyNormalized = true;
} else if (component.equals("..")) {
components.remove(components.size() - 1);
reallyNormalized = true;
} else {
components.add(component);
}
return reallyNormalized ? new File(GosuStringUtil.join(components, "/")) : file;
}
/** Updates the caching mode and propagates it to every already-cached JavaDirectoryImpl. */
@Override
public void setCachingMode(CachingMode cachingMode) {
synchronized (CACHED_FILE_SYSTEM_LOCK) {
_cachingMode = cachingMode;
for (IDirectory dir : _cachedDirInfo.values()) {
if (dir instanceof JavaDirectoryImpl) {
((JavaDirectoryImpl) dir).setCachingMode(cachingMode);
}
}
}
}
// Jar files are wrapped as directories; everything else is a plain filesystem directory.
private IDirectory createDir( File dir ) {
if (dir.getName().endsWith(".jar")) {
return new JarFileDirectoryImpl( dir );
} else {
return new JavaDirectoryImpl( dir, _cachingMode );
}
}
/** Clears the per-directory caches (or delegates to FileFactory when USE_NEW_API is set). */
public void clearAllCaches() {
if (USE_NEW_API) {
FileFactory.instance().getDefaultPhysicalFileSystem().clearAllCaches();
return;
}
synchronized (CACHED_FILE_SYSTEM_LOCK) {
for (IDirectory dir : _cachedDirInfo.values()) {
dir.clearCaches();
}
}
}
// Avoids a filesystem stat for names whose suffix is assumed to always be a file.
static boolean isDirectory(File f) {
String name = f.getName();
if (isAssumedFileSuffix(getFileSuffix(name))) {
return false;
} else {
return f.isDirectory();
}
}
// Returns the text after the last '.', or null when the name has no dot.
private static String getFileSuffix(String name) {
int dotIndex = name.lastIndexOf('.');
if (dotIndex == -1) {
return null;
} else {
return name.substring(dotIndex + 1);
}
}
/** Resolves a URL to a directory, first via a matching protocol adapter, else via file/jar/http handling. */
@Override
public IDirectory getIDirectory(URL url) {
if (url == null) {
return null;
}
IProtocolAdapter protocolAdapter = _protocolAdapters.get(url.getProtocol());
if (protocolAdapter != null) {
return protocolAdapter.getIDirectory(url);
}
return _iDirectoryResourceExtractor.getClassResource(url);
}
/** Resolves a URL to a file, first via a matching protocol adapter, else via file/jar/http handling. */
@Override
public IFile getIFile( URL url ) {
if (url == null) {
return null;
}
IProtocolAdapter protocolAdapter = _protocolAdapters.get(url.getProtocol());
if (protocolAdapter != null) {
return protocolAdapter.getIFile(url);
}
if (USE_NEW_API) {
return FileFactory.instance().getIFile(url);
}
return _iFileResourceExtractor.getClassResource(url);
}
@Override
public IFile getFakeFile(URL url, IModule module) {
return null;
}
// Discovers IProtocolAdapter implementations on the classpath via ServiceLoader.
private void loadProtocolAdapters() {
ServiceLoader<IProtocolAdapter> adapters = ServiceLoader.load(IProtocolAdapter.class, getClass().getClassLoader());
for (IProtocolAdapter adapter : adapters) {
for (String protocol : adapter.getSupportedProtocols()) {
_protocolAdapters.put(protocol, adapter);
}
}
}
// NOTE(review): not referenced anywhere in this file — adapters are loaded via
// ServiceLoader in loadProtocolAdapters(). Candidate for removal; confirm no external use.
private void loadProtocolAdapter(Collection<IProtocolAdapter> adapters, String adapterName) {
try {
Class<? extends IProtocolAdapter> adapterClass =
Class.forName(adapterName, true, Thread.currentThread().getContextClassLoader()).asSubclass(IProtocolAdapter.class);
adapters.add(adapterClass.newInstance());
} catch (ClassNotFoundException e) {
// It's not in the classpath, just ignore
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
/**
 * Shared protocol-dispatch logic for resolving a URL into an IFile or IDirectory.
 * NOTE(review): only "file", "jar" and "http" are handled; any other protocol
 * (including "https") falls through to the RuntimeException — confirm intended.
 */
private abstract class ResourceExtractor<J extends IResource> {
J getClassResource(URL _url) {
if (_url == null) {
return null;
}
if ( _url.getProtocol().equals( "file" ) ) {
return getIResourceFromJavaFile(_url);
}
else if ( _url.getProtocol().equals( "jar" ) ) {
JarURLConnection urlConnection;
URL jarFileUrl;
try {
urlConnection = (JarURLConnection) _url.openConnection();
jarFileUrl = urlConnection.getJarFileURL();
} catch (IOException e) {
throw new RuntimeException(e);
}
File dir = new File(jarFileUrl.getFile());
IDirectory jarFileDirectory;
// Reuses the same directory cache as getIDirectory(File).
synchronized (CACHED_FILE_SYSTEM_LOCK) {
jarFileDirectory = _cachedDirInfo.get(dir);
if (jarFileDirectory == null) {
jarFileDirectory = createDir( dir );
_cachedDirInfo.put( dir, jarFileDirectory );
}
}
return getIResourceFromJarDirectoryAndEntryName(jarFileDirectory,urlConnection.getEntryName());
}
else if ( _url.getProtocol().equals( "http" ) ) {
J res = getIResourceFromURL(_url);
if ( res != null ) {
return res;
}
}
throw new RuntimeException( "Unrecognized protocol: " + _url.getProtocol() );
}
abstract J getIResourceFromURL(URL location);
abstract J getIResourceFromJarDirectoryAndEntryName(IDirectory jarFS, String entryName);
abstract J getIResourceFromJavaFile(URL location);
// Converts a file: URL to a File, stripping any fragment before parsing.
protected File getFileFromURL(URL url) {
try {
URI uri = url.toURI();
if ( uri.getFragment() != null ) {
uri = new URI( uri.getScheme(), uri.getSchemeSpecificPart(), null );
}
return new File( uri );
}
catch ( URISyntaxException ex ) {
throw new RuntimeException( ex );
}
catch ( IllegalArgumentException ex ) {
// debug getting IAE only in TH - unable to parse URL with fragment identifier
throw new IllegalArgumentException( "Unable to parse URL " + url.toExternalForm(), ex );
}
}
}
private class IFileResourceExtractor extends ResourceExtractor<IFile> {
IFile getIResourceFromJarDirectoryAndEntryName(IDirectory jarFS, String entryName) {
return jarFS.file(entryName);
}
IFile getIResourceFromJavaFile(URL location) {
return CommonServices.getFileSystem().getIFile( getFileFromURL(location) );
}
@Override
IFile getIResourceFromURL(URL location) {
return new URLFileImpl(location);
}
}
private class IDirectoryResourceExtractor extends ResourceExtractor<IDirectory> {
protected IDirectory getIResourceFromJarDirectoryAndEntryName(IDirectory jarFS, String entryName) {
return jarFS.dir(entryName);
}
protected IDirectory getIResourceFromJavaFile(URL location) {
return CommonServices.getFileSystem().getIDirectory( getFileFromURL(location) );
}
// http URLs never resolve to a directory; getClassResource then throws.
@Override
IDirectory getIResourceFromURL(URL location) {
return null;
}
}
// Suffixes assumed to always denote regular files (see isDirectory above).
private static final Set<String> FILE_SUFFIXES;
static {
FILE_SUFFIXES = new HashSet<String>();
FILE_SUFFIXES.add("class");
FILE_SUFFIXES.add("eti");
FILE_SUFFIXES.add("etx");
FILE_SUFFIXES.add("gif");
FILE_SUFFIXES.add("gr");
FILE_SUFFIXES.add("grs");
FILE_SUFFIXES.add("gs");
FILE_SUFFIXES.add("gst");
FILE_SUFFIXES.add("gsx");
FILE_SUFFIXES.add("gti");
FILE_SUFFIXES.add("gx");
FILE_SUFFIXES.add("jar");
FILE_SUFFIXES.add("java");
FILE_SUFFIXES.add("pcf");
FILE_SUFFIXES.add("png");
FILE_SUFFIXES.add("properties");
FILE_SUFFIXES.add("tti");
FILE_SUFFIXES.add("ttx");
FILE_SUFFIXES.add("txt");
FILE_SUFFIXES.add("wsdl");
FILE_SUFFIXES.add("xml");
FILE_SUFFIXES.add("xsd");
}
private static boolean isAssumedFileSuffix(String suffix) {
return FILE_SUFFIXES.contains(suffix);
}
}
| |
/**
* Copyright (c) 2014 SQUARESPACE, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squarespace.less.exec;
import com.squarespace.less.LessContext;
import com.squarespace.less.LessException;
import com.squarespace.less.core.Buffer;
import com.squarespace.less.core.FlexList;
import com.squarespace.less.model.Block;
import com.squarespace.less.model.BlockNode;
import com.squarespace.less.model.Definition;
/**
* Represents the current execution environment.
*/
public class ExecEnv {
/**
* Context for the current compile.
*/
protected final LessContext ctx;
/**
* Stack frames for execution.
*/
protected final FlexList<Block> frames;
/**
* List of warnings emitted during execution. Created lazily by addWarning().
*/
protected FlexList<String> warnings;
/**
* Exception that terminated execution, if any.
*/
private LessException error;
/**
* Indicates whether strict math mode is currently enabled.
*/
private boolean strictMath;
/**
* When evaluating a rule we clear this flag and evaluate the rule's
* right-hand side expression. If any definition is encountered which
* is marked important, it will set this flag.
*/
private boolean importantFlagged;
/**
* Constructs an instance associated with the given compile context.
*/
public ExecEnv(LessContext ctx) {
this(ctx, new FlexList<Block>(64), null);
}
/**
* Constructs an instance associated with the given compile context and
* initial stack contents.
*/
public ExecEnv(LessContext ctx, FlexList<Block> initialStack) {
this(ctx, initialStack, null);
}
/**
* Constructs an instance associated with the given compile context,
* initial stack contents, and warning list.
*/
public ExecEnv(LessContext ctx, FlexList<Block> initialStack, FlexList<String> warnings) {
this.ctx = ctx;
this.frames = initialStack;
this.warnings = warnings;
}
/**
* Returns the context associated with this compile.
*/
public LessContext context() {
return ctx;
}
/**
* Returns a new {@link ExecEnv} with the same context, a copy of the stack frames,
* and a reference to the same warnings list. Note: the warnings list itself is
* shared with this instance, not copied.
*/
public ExecEnv copy() {
return new ExecEnv(ctx, frames.copy(), warnings);
}
/**
* Current stack depth.
*/
public int depth() {
return frames.size();
}
/**
* Indicates an error has been produced.
*/
public boolean hasError() {
return error != null;
}
/**
* Indicates whether strict math mode is currently enabled.
*/
public boolean isStrictMath() {
return strictMath;
}
/**
* Enables / disables strict math mode.
*/
public void setStrictMath(boolean flag) {
this.strictMath = flag;
}
/**
* Sets the important flag.
*/
public void setImportantFlag(boolean flag) {
this.importantFlagged = flag;
}
/**
* Indicates whether the important flag was set during evaluation.
*/
public boolean importantFlagged() {
return importantFlagged;
}
/**
* Returns the execution error, if any.
*/
public LessException error() {
return error;
}
/**
* Sets the execution error.
*/
public void error(LessException exc) {
error = exc;
}
/**
* Pushes a list of frames onto the stack.
*/
public void append(FlexList<Block> other) {
frames.append(other);
}
/**
* Adds a warning to the list, creating the list lazily on first use.
*/
public void addWarning(String warning) {
if (warnings == null) {
warnings = new FlexList<>();
}
warnings.append(warning);
}
/**
* Return the warnings joined with ", ", or null when there are none.
* Note: clears the accumulated warning list as a side effect.
*/
public String warnings() {
if (warnings == null || warnings.isEmpty()) {
return null;
}
StringBuilder buf = new StringBuilder();
int size = warnings.size();
for (int i = 0; i < size; i++) {
if (i > 0) {
buf.append(", ");
}
buf.append(warnings.get(i));
}
warnings.clear();
return buf.toString();
}
/**
* Returns the stack frames.
*/
public FlexList<Block> frames() {
return frames;
}
/**
* Iterate up the stack (innermost frame first), trying to find the given variable
* definition in each block; returns null when no frame defines it.
*/
public Definition resolveDefinition(String name) throws LessException {
int size = frames.size();
for (int i = size - 1; i >= 0; i--) {
Definition def = frames.get(i).resolveDefinition(name);
// Future: pragma to skip over circular references, looking in a higher stack frame.
// To do that we check if the definition is evaluating (circular) and skip it:
//
// if (def != null && pragmaSkipCircular() && !def.evaluating()) {
// ...
// If definition exists, return it
if (def != null) {
return def;
}
}
return null;
}
/**
* Iterate up the stack (innermost frame first), trying to resolve the mixin against
* each block; returns true on the first match.
*/
public boolean resolveMixins(MixinResolver resolver) throws LessException {
int size = frames.size();
for (int i = size - 1; i >= 0; i--) {
if (resolver.match(frames.get(i))) {
return true;
}
}
return false;
}
/**
* Push a block node onto the stack.
*/
public void push(BlockNode blockNode) throws LessException {
frames.push(blockNode.block());
}
/**
* Push a block onto the stack.
*/
public void push(Block block) throws LessException {
frames.push(block);
}
/**
* Pop the current block off the stack.
*/
public void pop() {
frames.pop();
}
/**
* Dump all variable definitions in each block of the stack, indenting one level
* per frame (innermost frame first). Used for debugging in a pinch.
*/
public String dumpDefinitions() {
Buffer buf = new Buffer(4);
int size = frames.size();
for (int i = size - 1; i >= 0; i--) {
frames.get(i).dumpDefs(buf);
buf.incrIndent();
}
return buf.toString();
}
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote;
import com.google.bytestream.ByteStreamGrpc;
import com.google.bytestream.ByteStreamGrpc.ByteStreamBlockingStub;
import com.google.bytestream.ByteStreamProto.ReadRequest;
import com.google.bytestream.ByteStreamProto.ReadResponse;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionInputFileCache;
import com.google.devtools.build.lib.actions.EnvironmentalExecException;
import com.google.devtools.build.lib.actions.ExecException;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.remote.Digests.ActionKey;
import com.google.devtools.build.lib.remote.TreeNodeRepository.TreeNode;
import com.google.devtools.build.lib.util.io.FileOutErr;
import com.google.devtools.build.lib.vfs.FileSystemUtils;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.remoteexecution.v1test.ActionCacheGrpc;
import com.google.devtools.remoteexecution.v1test.ActionCacheGrpc.ActionCacheBlockingStub;
import com.google.devtools.remoteexecution.v1test.ActionResult;
import com.google.devtools.remoteexecution.v1test.BatchUpdateBlobsRequest;
import com.google.devtools.remoteexecution.v1test.BatchUpdateBlobsResponse;
import com.google.devtools.remoteexecution.v1test.Command;
import com.google.devtools.remoteexecution.v1test.ContentAddressableStorageGrpc;
import com.google.devtools.remoteexecution.v1test.ContentAddressableStorageGrpc.ContentAddressableStorageBlockingStub;
import com.google.devtools.remoteexecution.v1test.Digest;
import com.google.devtools.remoteexecution.v1test.Directory;
import com.google.devtools.remoteexecution.v1test.FindMissingBlobsRequest;
import com.google.devtools.remoteexecution.v1test.FindMissingBlobsResponse;
import com.google.devtools.remoteexecution.v1test.GetActionResultRequest;
import com.google.devtools.remoteexecution.v1test.OutputFile;
import com.google.devtools.remoteexecution.v1test.UpdateActionResultRequest;
import com.google.protobuf.ByteString;
import io.grpc.Channel;
import io.grpc.Status;
import io.grpc.StatusRuntimeException;
import io.grpc.protobuf.StatusProto;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/** A RemoteActionCache implementation that uses gRPC calls to a remote cache server. */
@ThreadSafe
public class GrpcRemoteCache implements RemoteActionCache {
  private final RemoteOptions options;
  private final ChannelOptions channelOptions;
  private final Channel channel;
  // Retry policy applied to every remote call made by this class.
  private final Retrier retrier;
  // Handles chunked blob/file uploads over the ByteStream API.
  private final ByteStreamUploader uploader;
  // Single-threaded scheduler used for delayed retry attempts; shut down in close().
  private final ListeningScheduledExecutorService retryScheduler =
      MoreExecutors.listeningDecorator(Executors.newScheduledThreadPool(1));
  @VisibleForTesting
  public GrpcRemoteCache(Channel channel, ChannelOptions channelOptions, RemoteOptions options,
      Retrier retrier) {
    this.options = options;
    this.channelOptions = channelOptions;
    this.channel = channel;
    this.retrier = retrier;
    uploader = new ByteStreamUploader(options.remoteInstanceName, channel,
        channelOptions.getCallCredentials(), options.remoteTimeout, retrier, retryScheduler);
  }
  // A fresh stub is created per call because withDeadlineAfter fixes an absolute
  // deadline at stub-creation time; reusing a stub would reuse a stale deadline.
  private ContentAddressableStorageBlockingStub casBlockingStub() {
    return ContentAddressableStorageGrpc.newBlockingStub(channel)
        .withCallCredentials(channelOptions.getCallCredentials())
        .withDeadlineAfter(options.remoteTimeout, TimeUnit.SECONDS);
  }
  // Fresh ByteStream stub per call; see casBlockingStub() for the deadline rationale.
  private ByteStreamBlockingStub bsBlockingStub() {
    return ByteStreamGrpc.newBlockingStub(channel)
        .withCallCredentials(channelOptions.getCallCredentials())
        .withDeadlineAfter(options.remoteTimeout, TimeUnit.SECONDS);
  }
  // Fresh ActionCache stub per call; see casBlockingStub() for the deadline rationale.
  private ActionCacheBlockingStub acBlockingStub() {
    return ActionCacheGrpc.newBlockingStub(channel)
        .withCallCredentials(channelOptions.getCallCredentials())
        .withDeadlineAfter(options.remoteTimeout, TimeUnit.SECONDS);
  }
  /** Stops the retry scheduler and the uploader. Pending retries are cancelled. */
  @Override
  public void close() {
    retryScheduler.shutdownNow();
    uploader.shutdown();
  }
  /** Returns whether a remote cache endpoint is configured in the given options. */
  public static boolean isRemoteCacheOptions(RemoteOptions options) {
    return options.remoteCache != null;
  }
  /**
   * Queries the CAS for the subset of {@code digests} it does not yet contain.
   * Skips the RPC entirely (returning the empty set) when {@code digests} is empty.
   */
  private ImmutableSet<Digest> getMissingDigests(Iterable<Digest> digests)
      throws IOException, InterruptedException {
    FindMissingBlobsRequest.Builder request =
        FindMissingBlobsRequest.newBuilder()
            .setInstanceName(options.remoteInstanceName)
            .addAllBlobDigests(digests);
    if (request.getBlobDigestsCount() == 0) {
      return ImmutableSet.of();
    }
    FindMissingBlobsResponse response =
        retrier.execute(() -> casBlockingStub().findMissingBlobs(request.build()));
    return ImmutableSet.copyOf(response.getMissingBlobDigestsList());
  }
  /**
   * Upload enough of the tree metadata and data into remote cache so that the entire tree can be
   * reassembled remotely using the root digest.
   */
  @Override
  public void ensureInputsPresent(
      TreeNodeRepository repository, Path execRoot, TreeNode root, Command command)
      throws IOException, InterruptedException {
    repository.computeMerkleDigests(root);
    // TODO(olaola): avoid querying all the digests, only ask for novel subtrees.
    ImmutableSet<Digest> missingDigests = getMissingDigests(repository.getAllDigests(root));
    // Only upload data that was missing from the cache.
    ArrayList<ActionInput> missingActionInputs = new ArrayList<>();
    ArrayList<Directory> missingTreeNodes = new ArrayList<>();
    repository.getDataFromDigests(missingDigests, missingActionInputs, missingTreeNodes);
    if (!missingTreeNodes.isEmpty()) {
      // TODO(olaola): split this into multiple requests if total size is > 10MB.
      BatchUpdateBlobsRequest.Builder treeBlobRequest =
          BatchUpdateBlobsRequest.newBuilder().setInstanceName(options.remoteInstanceName);
      for (Directory d : missingTreeNodes) {
        byte[] data = d.toByteArray();
        treeBlobRequest
            .addRequestsBuilder()
            .setContentDigest(Digests.computeDigest(data))
            .setData(ByteString.copyFrom(data));
      }
      retrier.execute(
          () -> {
            BatchUpdateBlobsResponse response =
                casBlockingStub().batchUpdateBlobs(treeBlobRequest.build());
            // A non-OK per-blob status is surfaced as a StatusRuntimeException so
            // the retrier can decide whether to retry the whole batch.
            for (BatchUpdateBlobsResponse.Response r : response.getResponsesList()) {
              if (!Status.fromCodeValue(r.getStatus().getCode()).isOk()) {
                throw StatusProto.toStatusRuntimeException(r.getStatus());
              }
            }
            return null;
          });
    }
    uploadBlob(command.toByteArray());
    if (!missingActionInputs.isEmpty()) {
      List<Chunker> inputsToUpload = new ArrayList<>();
      ActionInputFileCache inputFileCache = repository.getInputFileCache();
      for (ActionInput actionInput : missingActionInputs) {
        inputsToUpload.add(new Chunker(actionInput, inputFileCache, execRoot));
      }
      uploader.uploadBlobs(inputsToUpload);
    }
  }
  /**
   * Download all results of a remotely executed action locally. TODO(olaola): will need to amend to
   * include the {@link com.google.devtools.build.lib.remote.TreeNodeRepository} for updating.
   */
  @Override
  public void download(ActionResult result, Path execRoot, FileOutErr outErr)
      throws ExecException, IOException, InterruptedException {
    try {
      for (OutputFile file : result.getOutputFilesList()) {
        Path path = execRoot.getRelative(file.getPath());
        FileSystemUtils.createDirectoryAndParents(path.getParentDirectory());
        Digest digest = file.getDigest();
        if (digest.getSizeBytes() == 0) {
          // Handle empty file locally.
          FileSystemUtils.writeContent(path, new byte[0]);
        } else {
          // Prefer inline content when the server provided it; otherwise stream
          // the blob from the CAS and verify the digest of what was written.
          if (!file.getContent().isEmpty()) {
            try (OutputStream stream = path.getOutputStream()) {
              file.getContent().writeTo(stream);
            }
          } else {
            retrier.execute(
                () -> {
                  try (OutputStream stream = path.getOutputStream()) {
                    readBlob(digest, stream);
                  }
                  return null;
                });
            Digest receivedDigest = Digests.computeDigest(path);
            if (!receivedDigest.equals(digest)) {
              throw new IOException(
                  "Digest does not match " + receivedDigest + " != " + digest);
            }
          }
        }
        path.setExecutable(file.getIsExecutable());
      }
      if (!result.getOutputDirectoriesList().isEmpty()) {
        throw new UnsupportedOperationException();
      }
      // TODO(ulfjack): use same code as above also for stdout / stderr if applicable.
      downloadOutErr(result, outErr);
    } catch (IOException downloadException) {
      try {
        // Delete any (partially) downloaded output files, since any subsequent local execution
        // of this action may expect none of the output files to exist.
        for (OutputFile file : result.getOutputFilesList()) {
          execRoot.getRelative(file.getPath()).delete();
        }
        outErr.getOutputPath().delete();
        outErr.getErrorPath().delete();
      } catch (IOException e) {
        // If deleting of output files failed, we abort the build with a decent error message as
        // any subsequent local execution failure would likely be incomprehensible.
        // We don't propagate the downloadException, as this is a recoverable error and the cause
        // of the build failure is really that we couldn't delete output files.
        throw new EnvironmentalExecException("Failed to delete output files after incomplete "
            + "download. Cannot continue with local execution.", e, true);
      }
      throw downloadException;
    }
  }
  /**
   * Writes the action's stdout/stderr to {@code outErr}, preferring inline raw
   * bytes in the result and falling back to fetching the blob by digest.
   */
  private void downloadOutErr(ActionResult result, FileOutErr outErr)
      throws IOException, InterruptedException {
    if (!result.getStdoutRaw().isEmpty()) {
      result.getStdoutRaw().writeTo(outErr.getOutputStream());
      outErr.getOutputStream().flush();
    } else if (result.hasStdoutDigest()) {
      byte[] stdoutBytes = downloadBlob(result.getStdoutDigest());
      outErr.getOutputStream().write(stdoutBytes);
      outErr.getOutputStream().flush();
    }
    if (!result.getStderrRaw().isEmpty()) {
      result.getStderrRaw().writeTo(outErr.getErrorStream());
      outErr.getErrorStream().flush();
    } else if (result.hasStderrDigest()) {
      byte[] stderrBytes = downloadBlob(result.getStderrDigest());
      outErr.getErrorStream().write(stderrBytes);
      outErr.getErrorStream().flush();
    }
  }
  /**
   * This method can throw {@link StatusRuntimeException}, but the RemoteCache interface does not
   * allow throwing such an exception. Any caller must make sure to catch the
   * {@link StatusRuntimeException}. Note that the retrier implicitly catches it, so if this is used
   * in the context of {@link Retrier#execute}, that's perfectly safe.
   *
   * <p>This method also converts any NOT_FOUND code returned from the server into a
   * {@link CacheNotFoundException}. TODO(olaola): this is not enough. NOT_FOUND can also be raised
   * by execute, in which case the server should return the missing digest in the Status.details
   * field. This should be part of the API.
   */
  private void readBlob(Digest digest, OutputStream stream)
      throws IOException, StatusRuntimeException {
    // Resource name format: [<instance-name>/]blobs/<hash>/<size>.
    String resourceName = "";
    if (!options.remoteInstanceName.isEmpty()) {
      resourceName += options.remoteInstanceName + "/";
    }
    resourceName += "blobs/" + digest.getHash() + "/" + digest.getSizeBytes();
    try {
      Iterator<ReadResponse> replies = bsBlockingStub()
          .read(ReadRequest.newBuilder().setResourceName(resourceName).build());
      while (replies.hasNext()) {
        replies.next().getData().writeTo(stream);
      }
    } catch (StatusRuntimeException e) {
      if (e.getStatus().getCode() == Status.Code.NOT_FOUND) {
        throw new CacheNotFoundException(digest);
      }
      throw e;
    }
  }
  /**
   * Uploads the given output files and stdout/stderr, then records the resulting
   * {@link ActionResult} in the action cache under {@code actionKey}. Silently
   * skips the cache write if the server reports UNIMPLEMENTED.
   */
  @Override
  public void upload(ActionKey actionKey, Path execRoot, Collection<Path> files, FileOutErr outErr)
      throws IOException, InterruptedException {
    ActionResult.Builder result = ActionResult.newBuilder();
    upload(execRoot, files, outErr, result);
    try {
      retrier.execute(
          () ->
              acBlockingStub()
                  .updateActionResult(
                      UpdateActionResultRequest.newBuilder()
                          .setInstanceName(options.remoteInstanceName)
                          .setActionDigest(actionKey.getDigest())
                          .setActionResult(result)
                          .build()));
    } catch (RetryException e) {
      if (e.causedByStatusCode(Status.Code.UNIMPLEMENTED)) {
        // Silently return without upload.
        return;
      }
      throw e;
    }
  }
  /**
   * Uploads the given files (and stdout/stderr) to the CAS and fills {@code result}
   * with their paths and digests. Missing files are skipped; directories are not
   * yet supported.
   */
  void upload(Path execRoot, Collection<Path> files, FileOutErr outErr, ActionResult.Builder result)
      throws IOException, InterruptedException {
    Map<Digest, Path> digestToFile = new HashMap<>();
    for (Path file : files) {
      if (!file.exists()) {
        // We ignore requested results that have not been generated by the action.
        continue;
      }
      if (file.isDirectory()) {
        // TODO(olaola): to implement this for a directory, will need to create or pass a
        // TreeNodeRepository to call uploadTree.
        throw new UnsupportedOperationException("Storing a directory is not yet supported.");
      }
      Digest digest = Digests.computeDigest(file);
      // TODO(olaola): inline small results here.
      result
          .addOutputFilesBuilder()
          .setPath(file.relativeTo(execRoot).getPathString())
          .setDigest(digest)
          .setIsExecutable(file.isExecutable());
      digestToFile.put(digest, file);
    }
    // Only upload content the CAS does not already have.
    ImmutableSet<Digest> digestsToUpload = getMissingDigests(digestToFile.keySet());
    List<Chunker> filesToUpload = new ArrayList<>();
    for (Digest digest : digestsToUpload) {
      Path file = digestToFile.get(digest);
      if (file == null) {
        String message = "FindMissingBlobs call returned an unknown digest: " + digest;
        throw new IOException(message);
      }
      filesToUpload.add(new Chunker(file));
    }
    if (!filesToUpload.isEmpty()) {
      uploader.uploadBlobs(filesToUpload);
    }
    // TODO(olaola): inline small stdout/stderr here.
    if (outErr.getErrorPath().exists()) {
      Digest stderr = uploadFileContents(outErr.getErrorPath());
      result.setStderrDigest(stderr);
    }
    if (outErr.getOutputPath().exists()) {
      Digest stdout = uploadFileContents(outErr.getOutputPath());
      result.setStdoutDigest(stdout);
    }
  }
  /**
   * Put the file contents cache if it is not already in it. No-op if the file is already stored in
   * cache. The given path must be a full absolute path.
   *
   * @return The key for fetching the file contents blob from cache.
   */
  private Digest uploadFileContents(Path file) throws IOException, InterruptedException {
    Digest digest = Digests.computeDigest(file);
    ImmutableSet<Digest> missing = getMissingDigests(ImmutableList.of(digest));
    if (!missing.isEmpty()) {
      uploader.uploadBlob(new Chunker(file));
    }
    return digest;
  }
  /**
   * Put the file contents cache if it is not already in it. No-op if the file is already stored in
   * cache. The given path must be a full absolute path.
   *
   * @return The key for fetching the file contents blob from cache.
   */
  Digest uploadFileContents(ActionInput input, Path execRoot, ActionInputFileCache inputCache)
      throws IOException, InterruptedException {
    Digest digest = Digests.getDigestFromInputCache(input, inputCache);
    ImmutableSet<Digest> missing = getMissingDigests(ImmutableList.of(digest));
    if (!missing.isEmpty()) {
      uploader.uploadBlob(new Chunker(input, inputCache, execRoot));
    }
    return digest;
  }
  /**
   * Uploads the given bytes to the CAS if not already present.
   *
   * @return the digest under which the blob can be fetched.
   */
  Digest uploadBlob(byte[] blob) throws IOException, InterruptedException {
    Digest digest = Digests.computeDigest(blob);
    ImmutableSet<Digest> missing = getMissingDigests(ImmutableList.of(digest));
    if (!missing.isEmpty()) {
      uploader.uploadBlob(new Chunker(blob));
    }
    return digest;
  }
  /**
   * Fetches the blob with the given digest from the CAS into memory, retrying on
   * transient failures. Zero-sized digests short-circuit to an empty array.
   */
  byte[] downloadBlob(Digest digest)
      throws IOException, InterruptedException {
    if (digest.getSizeBytes() == 0) {
      return new byte[0];
    }
    return retrier.execute(
        () -> {
          ByteArrayOutputStream stream = new ByteArrayOutputStream((int) digest.getSizeBytes());
          readBlob(digest, stream);
          return stream.toByteArray();
        });
  }
  // Execution Cache API
  /**
   * Looks up a cached result for the given action key. Returns {@code null} on a
   * cache miss (server responded NOT_FOUND).
   */
  @Override
  public ActionResult getCachedActionResult(ActionKey actionKey)
      throws IOException, InterruptedException {
    try {
      return retrier.execute(
          () ->
              acBlockingStub()
                  .getActionResult(
                      GetActionResultRequest.newBuilder()
                          .setInstanceName(options.remoteInstanceName)
                          .setActionDigest(actionKey.getDigest())
                          .build()));
    } catch (RetryException e) {
      if (e.causedByStatusCode(Status.Code.NOT_FOUND)) {
        // Return null to indicate that it was a cache miss.
        return null;
      }
      throw e;
    }
  }
}
| |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.errorreporting.spi.v1beta1;
import com.google.api.gax.core.ConnectionSettings;
import com.google.api.gax.core.RetrySettings;
import com.google.api.gax.grpc.ApiCallSettings;
import com.google.api.gax.grpc.PageStreamingDescriptor;
import com.google.api.gax.grpc.ServiceApiSettings;
import com.google.api.gax.grpc.SimpleCallSettings;
import com.google.auth.Credentials;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.devtools.clouderrorreporting.v1beta1.ErrorGroup;
import com.google.devtools.clouderrorreporting.v1beta1.ErrorGroupServiceGrpc;
import com.google.devtools.clouderrorreporting.v1beta1.GetGroupRequest;
import com.google.devtools.clouderrorreporting.v1beta1.UpdateGroupRequest;
import io.grpc.ManagedChannel;
import io.grpc.Status;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ScheduledExecutorService;
import org.joda.time.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
* Settings class to configure an instance of {@link ErrorGroupServiceApi}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (clouderrorreporting.googleapis.com) and default port (443)
* are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders.
* When build() is called, the tree of builders is called to create the complete settings
* object. For example, to set the total timeout of getGroup to 30 seconds:
*
* <pre>
* <code>
* ErrorGroupServiceSettings.Builder errorGroupServiceSettingsBuilder =
* ErrorGroupServiceSettings.defaultBuilder();
* errorGroupServiceSettingsBuilder.getGroupSettings().getRetrySettingsBuilder()
* .setTotalTimeout(Duration.standardSeconds(30));
* ErrorGroupServiceSettings errorGroupServiceSettings = errorGroupServiceSettingsBuilder.build();
* </code>
* </pre>
*/
@javax.annotation.Generated("by GAPIC")
public class ErrorGroupServiceSettings extends ServiceApiSettings {
  /**
   * The default address of the service.
   */
  private static final String DEFAULT_SERVICE_ADDRESS = "clouderrorreporting.googleapis.com";
  /**
   * The default port of the service.
   */
  private static final int DEFAULT_SERVICE_PORT = 443;
  /**
   * The default scopes of the service.
   */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();
  /**
   * The default connection settings of the service.
   */
  public static final ConnectionSettings DEFAULT_CONNECTION_SETTINGS =
      ConnectionSettings.newBuilder()
          .setServiceAddress(DEFAULT_SERVICE_ADDRESS)
          .setPort(DEFAULT_SERVICE_PORT)
          .provideCredentialsWith(DEFAULT_SERVICE_SCOPES)
          .build();
  // Immutable per-method call settings, frozen from the builder at construction time.
  private final SimpleCallSettings<GetGroupRequest, ErrorGroup> getGroupSettings;
  private final SimpleCallSettings<UpdateGroupRequest, ErrorGroup> updateGroupSettings;
  /**
   * Returns the object with the settings used for calls to getGroup.
   */
  public SimpleCallSettings<GetGroupRequest, ErrorGroup> getGroupSettings() {
    return getGroupSettings;
  }
  /**
   * Returns the object with the settings used for calls to updateGroup.
   */
  public SimpleCallSettings<UpdateGroupRequest, ErrorGroup> updateGroupSettings() {
    return updateGroupSettings;
  }
  /**
   * Returns the default service address.
   */
  public static String getDefaultServiceAddress() {
    return DEFAULT_SERVICE_ADDRESS;
  }
  /**
   * Returns the default service port.
   */
  public static int getDefaultServicePort() {
    return DEFAULT_SERVICE_PORT;
  }
  /**
   * Returns the default service scopes.
   */
  public static ImmutableList<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }
  /**
   * Returns a builder for this class with recommended defaults.
   */
  public static Builder defaultBuilder() {
    return Builder.createDefault();
  }
  /**
   * Returns a new builder for this class.
   */
  public static Builder newBuilder() {
    return new Builder();
  }
  /**
   * Returns a builder containing all the values of this settings class.
   */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /**
   * Constructs the immutable settings from the given builder, building each
   * per-method settings object.
   */
  private ErrorGroupServiceSettings(Builder settingsBuilder) throws IOException {
    super(
        settingsBuilder.getChannelProvider(),
        settingsBuilder.getExecutorProvider(),
        settingsBuilder.getGeneratorName(),
        settingsBuilder.getGeneratorVersion(),
        settingsBuilder.getClientLibName(),
        settingsBuilder.getClientLibVersion());
    getGroupSettings = settingsBuilder.getGroupSettings().build();
    updateGroupSettings = settingsBuilder.updateGroupSettings().build();
  }
  /**
   * Builder for ErrorGroupServiceSettings.
   */
  public static class Builder extends ServiceApiSettings.Builder {
    // All per-method builders, kept together so applyToAllApiMethods can update them in one pass.
    private final ImmutableList<ApiCallSettings.Builder> methodSettingsBuilders;
    private SimpleCallSettings.Builder<GetGroupRequest, ErrorGroup> getGroupSettings;
    private SimpleCallSettings.Builder<UpdateGroupRequest, ErrorGroup> updateGroupSettings;
    // Named sets of gRPC status codes that are considered retryable.
    private static final ImmutableMap<String, ImmutableSet<Status.Code>> RETRYABLE_CODE_DEFINITIONS;
    static {
      ImmutableMap.Builder<String, ImmutableSet<Status.Code>> definitions = ImmutableMap.builder();
      definitions.put(
          "idempotent",
          Sets.immutableEnumSet(
              Lists.<Status.Code>newArrayList(
                  Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE)));
      definitions.put("non_idempotent", Sets.immutableEnumSet(Lists.<Status.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }
    // Named retry/backoff parameter presets applied by createDefault().
    private static final ImmutableMap<String, RetrySettings.Builder> RETRY_PARAM_DEFINITIONS;
    static {
      ImmutableMap.Builder<String, RetrySettings.Builder> definitions = ImmutableMap.builder();
      RetrySettings.Builder settingsBuilder = null;
      settingsBuilder =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.millis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.millis(60000L))
              .setInitialRpcTimeout(Duration.millis(20000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.millis(20000L))
              .setTotalTimeout(Duration.millis(600000L));
      definitions.put("default", settingsBuilder);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }
    // Creates a builder with no retry configuration; see createDefault() for defaults.
    private Builder() {
      super(DEFAULT_CONNECTION_SETTINGS);
      getGroupSettings = SimpleCallSettings.newBuilder(ErrorGroupServiceGrpc.METHOD_GET_GROUP);
      updateGroupSettings =
          SimpleCallSettings.newBuilder(ErrorGroupServiceGrpc.METHOD_UPDATE_GROUP);
      methodSettingsBuilders =
          ImmutableList.<ApiCallSettings.Builder>of(getGroupSettings, updateGroupSettings);
    }
    /**
     * Creates a builder with the recommended retryable codes and retry parameters
     * applied to each method.
     */
    private static Builder createDefault() {
      Builder builder = new Builder();
      builder
          .getGroupSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .updateGroupSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      return builder;
    }
    // Re-opens an existing settings object for modification (used by toBuilder()).
    private Builder(ErrorGroupServiceSettings settings) {
      super(settings);
      getGroupSettings = settings.getGroupSettings.toBuilder();
      updateGroupSettings = settings.updateGroupSettings.toBuilder();
      methodSettingsBuilders =
          ImmutableList.<ApiCallSettings.Builder>of(getGroupSettings, updateGroupSettings);
    }
    @Override
    protected ConnectionSettings getDefaultConnectionSettings() {
      return DEFAULT_CONNECTION_SETTINGS;
    }
    @Override
    public Builder provideExecutorWith(ScheduledExecutorService executor, boolean shouldAutoClose) {
      super.provideExecutorWith(executor, shouldAutoClose);
      return this;
    }
    @Override
    public Builder provideChannelWith(ManagedChannel channel, boolean shouldAutoClose) {
      super.provideChannelWith(channel, shouldAutoClose);
      return this;
    }
    @Override
    public Builder provideChannelWith(ConnectionSettings settings) {
      super.provideChannelWith(settings);
      return this;
    }
    @Override
    public Builder provideChannelWith(Credentials credentials) {
      super.provideChannelWith(credentials);
      return this;
    }
    @Override
    public Builder provideChannelWith(List<String> scopes) {
      super.provideChannelWith(scopes);
      return this;
    }
    @Override
    public Builder setGeneratorHeader(String name, String version) {
      super.setGeneratorHeader(name, version);
      return this;
    }
    @Override
    public Builder setClientLibHeader(String name, String version) {
      super.setClientLibHeader(name, version);
      return this;
    }
    /**
     * Applies the given settings to all of the API methods in this service. Only
     * values that are non-null will be applied, so this method is not capable
     * of un-setting any values.
     */
    public Builder applyToAllApiMethods(ApiCallSettings.Builder apiCallSettings) throws Exception {
      super.applyToAllApiMethods(methodSettingsBuilders, apiCallSettings);
      return this;
    }
    /**
     * Returns the builder for the settings used for calls to getGroup.
     */
    public SimpleCallSettings.Builder<GetGroupRequest, ErrorGroup> getGroupSettings() {
      return getGroupSettings;
    }
    /**
     * Returns the builder for the settings used for calls to updateGroup.
     */
    public SimpleCallSettings.Builder<UpdateGroupRequest, ErrorGroup> updateGroupSettings() {
      return updateGroupSettings;
    }
    @Override
    public ErrorGroupServiceSettings build() throws IOException {
      return new ErrorGroupServiceSettings(this);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.taskexecutor;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.configuration.UnmodifiableConfiguration;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.runtime.concurrent.Executors;
import org.apache.flink.runtime.highavailability.HighAvailabilityServices;
import org.apache.flink.runtime.highavailability.HighAvailabilityServicesUtils;
import org.apache.flink.runtime.rpc.RpcService;
import org.apache.flink.util.IOUtils;
import org.apache.flink.util.TestLogger;
import net.jcip.annotations.NotThreadSafe;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import sun.net.util.IPAddressUtil;
import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.URI;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeNoException;
/**
* Validates that the TaskManagerRunner startup properly obeys the configuration
* values.
*
* <p>NOTE: at least {@link #testDefaultFsParameterLoading()} should not be run in parallel to other
* tests in the same JVM as it modifies a static (private) member of the {@link FileSystem} class
* and verifies its content.
*/
@NotThreadSafe
public class TaskManagerRunnerConfigurationTest extends TestLogger {
	// NOTE(review): this constant's usage sites are outside this excerpt; presumably it
	// bounds service shutdown waits — confirm at the usage sites.
	private static final int TEST_TIMEOUT_SECONDS = 10;
	// Fresh temporary directory per test, cleaned up automatically by the JUnit rule.
	@Rule
	public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Test
public void testTaskManagerRpcServiceShouldBindToConfiguredTaskManagerHostname() throws Exception {
final String taskmanagerHost = "testhostname";
final Configuration config = createFlinkConfigWithPredefinedTaskManagerHostname(taskmanagerHost);
final HighAvailabilityServices highAvailabilityServices = createHighAvailabilityServices(config);
RpcService taskManagerRpcService = null;
try {
taskManagerRpcService = TaskManagerRunner.createRpcService(config, highAvailabilityServices);
assertThat(taskManagerRpcService.getPort(), is(greaterThanOrEqualTo(0)));
assertThat(taskManagerRpcService.getAddress(), is(equalTo(taskmanagerHost)));
} finally {
maybeCloseRpcService(taskManagerRpcService);
highAvailabilityServices.closeAndCleanupAllData();
}
}
@Test
public void testTaskManagerRpcServiceShouldBindToHostnameAddress() throws Exception {
final Configuration config = createFlinkConfigWithHostBindPolicy(HostBindPolicy.NAME);
final HighAvailabilityServices highAvailabilityServices = createHighAvailabilityServices(config);
RpcService taskManagerRpcService = null;
try {
taskManagerRpcService = TaskManagerRunner.createRpcService(config, highAvailabilityServices);
assertThat(taskManagerRpcService.getAddress(), not(isEmptyOrNullString()));
} finally {
maybeCloseRpcService(taskManagerRpcService);
highAvailabilityServices.closeAndCleanupAllData();
}
}
@Test
public void testTaskManagerRpcServiceShouldBindToIpAddressDeterminedByConnectingToResourceManager() throws Exception {
final ServerSocket testJobManagerSocket = openServerSocket();
final Configuration config = createFlinkConfigWithJobManagerPort(testJobManagerSocket.getLocalPort());
final HighAvailabilityServices highAvailabilityServices = createHighAvailabilityServices(config);
RpcService taskManagerRpcService = null;
try {
taskManagerRpcService = TaskManagerRunner.createRpcService(config, highAvailabilityServices);
assertThat(taskManagerRpcService.getAddress(), is(ipAddress()));
} finally {
maybeCloseRpcService(taskManagerRpcService);
highAvailabilityServices.closeAndCleanupAllData();
IOUtils.closeQuietly(testJobManagerSocket);
}
}
@Test
public void testCreatingTaskManagerRpcServiceShouldFailIfRpcPortRangeIsInvalid() throws Exception {
final Configuration config = new Configuration(createFlinkConfigWithPredefinedTaskManagerHostname("example.org"));
config.setString(TaskManagerOptions.RPC_PORT, "-1");
final HighAvailabilityServices highAvailabilityServices = createHighAvailabilityServices(config);
try {
TaskManagerRunner.createRpcService(config, highAvailabilityServices);
fail("Should fail because -1 is not a valid port range");
} catch (final IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("Invalid port range definition: -1"));
} finally {
highAvailabilityServices.closeAndCleanupAllData();
}
}
@Test
public void testDefaultFsParameterLoading() throws Exception {
    try {
        final File tmpDir = temporaryFolder.newFolder();
        final File confFile = new File(tmpDir, GlobalConfiguration.FLINK_CONF_FILENAME);
        final URI defaultFS = new URI("otherFS", null, "localhost", 1234, null, null, null);
        // Fix: try-with-resources guarantees the writer is closed even when
        // println throws; the original leaked the PrintWriter on failure.
        try (PrintWriter pw = new PrintWriter(confFile)) {
            pw.println("fs.default-scheme: " + defaultFS);
        }
        String[] args = new String[] {"--configDir", tmpDir.toString()};
        Configuration configuration = TaskManagerRunner.loadConfiguration(args);
        FileSystem.initialize(configuration);
        assertEquals(defaultFS, FileSystem.getDefaultFsUri());
    }
    finally {
        // reset FS settings
        FileSystem.initialize(new Configuration());
    }
}
private static Configuration createFlinkConfigWithPredefinedTaskManagerHostname(
        final String taskmanagerHost) {
    // Pin the task manager host explicitly; wrap so the test cannot mutate it later.
    final Configuration cfg = new Configuration();
    cfg.setString(TaskManagerOptions.HOST, taskmanagerHost);
    cfg.setString(JobManagerOptions.ADDRESS, "localhost");
    return new UnmodifiableConfiguration(cfg);
}
private static Configuration createFlinkConfigWithHostBindPolicy(final HostBindPolicy bindPolicy) {
    // Only the bind policy and a dummy job manager address are needed.
    final Configuration cfg = new Configuration();
    cfg.setString(TaskManagerOptions.HOST_BIND_POLICY, bindPolicy.toString());
    cfg.setString(JobManagerOptions.ADDRESS, "localhost");
    return new UnmodifiableConfiguration(cfg);
}
private static Configuration createFlinkConfigWithJobManagerPort(final int port) {
    // Point the task manager at a job manager on localhost:port; immutable wrapper.
    final Configuration cfg = new Configuration();
    cfg.setString(JobManagerOptions.ADDRESS, "localhost");
    cfg.setInteger(JobManagerOptions.PORT, port);
    return new UnmodifiableConfiguration(cfg);
}
private HighAvailabilityServices createHighAvailabilityServices(final Configuration config) throws Exception {
    // Tests never need DNS resolution, so it is disabled explicitly.
    final HighAvailabilityServices services =
        HighAvailabilityServicesUtils.createHighAvailabilityServices(
            config,
            Executors.directExecutor(),
            HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION);
    return services;
}
private static ServerSocket openServerSocket() {
    try {
        // Port 0 asks the OS to pick any free port.
        return new ServerSocket(0);
    } catch (IOException ioe) {
        // Treat "no free port" as a skipped test, not a failure.
        assumeNoException("Skip test because could not open a server socket", ioe);
        // Unreachable: assumeNoException always throws; this satisfies the compiler.
        throw new RuntimeException("satisfy compiler");
    }
}
private static void maybeCloseRpcService(@Nullable final RpcService rpcService) throws Exception {
    // Null means the service was never created; nothing to shut down.
    if (rpcService == null) {
        return;
    }
    rpcService.stopService().get(TEST_TIMEOUT_SECONDS, TimeUnit.SECONDS);
}
private static TypeSafeMatcher<String> ipAddress() {
    // Matches any literal IPv4 or IPv6 address string.
    return new TypeSafeMatcher<String>() {
        @Override
        protected boolean matchesSafely(String candidate) {
            return IPAddressUtil.isIPv4LiteralAddress(candidate)
                || IPAddressUtil.isIPv6LiteralAddress(candidate);
        }

        @Override
        public void describeTo(Description description) {
            description.appendText("Is an ip address.");
        }
    };
}
}
| |
/*
* Copyright (c) 2011-2018, Meituan Dianping. All Rights Reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dianping.cat.helper;
import java.util.Calendar;
import java.util.Date;
public class TimeHelper {

    /** Milliseconds in one second. */
    public static final long ONE_SECOND = 1000L;

    /** Milliseconds in one minute. */
    public static final long ONE_MINUTE = 60 * 1000L;

    /** Milliseconds in one hour. */
    public static final long ONE_HOUR = 60 * 60 * 1000L;

    /** Milliseconds in one day. */
    public static final long ONE_DAY = 24 * ONE_HOUR;

    /** Milliseconds in one week. */
    public static final long ONE_WEEK = 7 * ONE_DAY;

    /**
     * Returns a copy of {@code date} shifted by {@code day} calendar days
     * (negative values move backwards in time).
     *
     * @param date the base date, not modified
     * @param day number of days to add
     * @return the shifted date
     */
    public static Date addDays(Date date, int day) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        cal.add(Calendar.DAY_OF_MONTH, day);
        return cal.getTime();
    }

    /** Returns midnight (00:00:00.000) of today in the default time zone. */
    public static Date getCurrentDay() {
        return getCurrentDay(0);
    }

    /**
     * Returns midnight of today shifted by {@code index} days
     * (e.g. -1 is yesterday, 1 is tomorrow).
     */
    public static Date getCurrentDay(int index) {
        Calendar cal = Calendar.getInstance();
        truncateToDay(cal);
        cal.add(Calendar.DAY_OF_MONTH, index);
        return cal.getTime();
    }

    /** Returns midnight of the day containing {@code timestamp}. */
    public static Date getCurrentDay(long timestamp) {
        return getCurrentDay(timestamp, 0);
    }

    /**
     * Returns midnight of the day containing {@code timestamp}, shifted by
     * {@code index} days.
     */
    public static Date getCurrentDay(long timestamp, int index) {
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(timestamp);
        truncateToDay(cal);
        cal.add(Calendar.DAY_OF_MONTH, index);
        return cal.getTime();
    }

    /** Returns the start (xx:00:00.000) of the current hour. */
    public static Date getCurrentHour() {
        return getCurrentHour(0);
    }

    /** Returns the start of the current hour shifted by {@code index} hours. */
    public static Date getCurrentHour(int index) {
        Calendar cal = Calendar.getInstance();
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        cal.add(Calendar.HOUR_OF_DAY, index);
        return cal.getTime();
    }

    /** Returns the start (xx:xx:00.000) of the current minute. */
    public static Date getCurrentMinute() {
        return getCurrentMinute(0);
    }

    /** Returns the start of the current minute shifted by {@code index} minutes. */
    public static Date getCurrentMinute(int index) {
        Calendar cal = Calendar.getInstance();
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        cal.add(Calendar.MINUTE, index);
        return cal.getTime();
    }

    /** Returns midnight of the first day of the current month. */
    public static Date getCurrentMonth() {
        Calendar cal = Calendar.getInstance();
        cal.set(Calendar.DAY_OF_MONTH, 1);
        truncateToDay(cal);
        return cal.getTime();
    }

    /**
     * Returns the most recent Saturday at midnight; if today is Saturday,
     * today's midnight is returned. (Fixed comment typo: "lastest sarterday".)
     */
    public static Date getCurrentWeek() {
        Calendar cal = Calendar.getInstance();
        truncateToDay(cal);
        int dayOfWeek = cal.get(Calendar.DAY_OF_WEEK);
        // DAY_OF_WEEK runs SUNDAY(1)..SATURDAY(7); stepping back dayOfWeek days
        // from any non-Saturday always lands on the previous Saturday.
        if (dayOfWeek != Calendar.SATURDAY) {
            cal.add(Calendar.DATE, -dayOfWeek);
        }
        return cal.getTime();
    }

    /** Returns midnight of the first day of the previous month. */
    public static Date getLastMonth() {
        Calendar cal = Calendar.getInstance();
        cal.set(Calendar.DAY_OF_MONTH, 1);
        truncateToDay(cal);
        cal.add(Calendar.MONTH, -1);
        return cal.getTime();
    }

    /**
     * Returns the current minute as a tag "M00".."M59", zero-padded to two digits.
     */
    public static String getMinuteStr() {
        int minute = Calendar.getInstance().get(Calendar.MINUTE);
        String minuteStr = String.valueOf(minute);
        if (minute < 10) {
            minuteStr = '0' + minuteStr;
        }
        return "M" + minuteStr;
    }

    /**
     * Returns the start of the current {@code step}-second slot within the
     * current minute (e.g. step=10 yields :00, :10, :20, ... of this minute).
     *
     * @param step slot width in seconds; must be positive
     */
    public static Date getStepSecond(int step) {
        long current = System.currentTimeMillis();
        long gap = current % ONE_MINUTE;
        long minute = current - gap;
        int index = (int) gap / (int) (step * ONE_SECOND);
        return new Date(minute + index * step * ONE_SECOND);
    }

    /** Returns midnight of yesterday. */
    public static Date getYesterday() {
        return getCurrentDay(-1);
    }

    /**
     * Sleeps until roughly 500 ms past the start of the next minute.
     *
     * @return true if the sleep completed, false if the thread was interrupted
     */
    public static boolean sleepToNextMinute() {
        return sleepToNextMinute(500);
    }

    /**
     * Sleeps until {@code overTime} ms past the start of the next minute.
     *
     * @param overTime extra milliseconds to sleep beyond the minute boundary
     * @return true if the sleep completed, false if the thread was interrupted
     */
    public static boolean sleepToNextMinute(long overTime) {
        try {
            long current = System.currentTimeMillis();
            Thread.sleep(ONE_MINUTE - current % ONE_MINUTE + overTime);
            return true;
        } catch (InterruptedException e) {
            // Fix: restore the interrupt flag so callers can still observe the
            // interruption (the original silently swallowed it).
            Thread.currentThread().interrupt();
            return false;
        }
    }

    /** Zeroes the time-of-day fields, leaving the calendar at midnight. */
    private static void truncateToDay(Calendar cal) {
        cal.set(Calendar.HOUR_OF_DAY, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
    }
}
| |
/**
* Copyright (c) 2015 Source Auditor Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.spdx.rdfparser.model;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ImmutableMap;
import org.apache.log4j.Logger;
import org.spdx.rdfparser.IModelContainer;
import org.spdx.rdfparser.InvalidSPDXAnalysisException;
import org.spdx.rdfparser.RdfModelHelper;
import org.spdx.rdfparser.RdfParserHelper;
import org.spdx.rdfparser.SpdxRdfConstants;
import org.spdx.rdfparser.license.AnyLicenseInfo;
import org.spdx.rdfparser.model.Checksum.ChecksumAlgorithm;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.util.iterator.ExtendedIterator;
/**
* A File represents a named sequence of information
* that is contained in a software package.
* @author Gary O'Neall
*
*/
public class SpdxFile extends SpdxItem implements Comparable<SpdxFile> {
static final Logger logger = Logger.getLogger(SpdxFile.class.getName());
public enum FileType {fileType_application, fileType_archive,
fileType_audio, fileType_binary, fileType_documentation,
fileType_image, fileType_other, fileType_source, fileType_spdx,
fileType_text, fileType_video;
@SuppressWarnings("deprecation")
/**
* @return The tag for this file type.
*/
public String getTag(){
return FILE_TYPE_TO_TAG.get(this);
}
/**
*
* @param tag
* @return The file type corresponding to the provided tag.
*/
@SuppressWarnings("deprecation")
public static FileType fromTag(String tag){
return TAG_TO_FILE_TYPE.get(tag);
}
}
@Deprecated
/**
* Use {@link FileType#getTag()} instead.
* @deprecated
*/
public static final Map<FileType, String> FILE_TYPE_TO_TAG;
@Deprecated
/**
* Use {@link org.spdx.rdfparser.model.SpdxFile.FileType#fromTag(String)} instead
* @deprecated
*/
public static final Map<String, FileType> TAG_TO_FILE_TYPE;
static {
ImmutableMap.Builder<FileType, String> fileTypeToTagBuilder = ImmutableMap.builder();
ImmutableMap.Builder<String, FileType> tagToFileTypeBuilder = ImmutableMap.builder();
fileTypeToTagBuilder.put(FileType.fileType_application, "APPLICATION");
tagToFileTypeBuilder.put("APPLICATION", FileType.fileType_application);
fileTypeToTagBuilder.put(FileType.fileType_archive, "ARCHIVE");
tagToFileTypeBuilder.put("ARCHIVE", FileType.fileType_archive);
fileTypeToTagBuilder.put(FileType.fileType_audio, "AUDIO");
tagToFileTypeBuilder.put("AUDIO", FileType.fileType_audio);
fileTypeToTagBuilder.put(FileType.fileType_binary, "BINARY");
tagToFileTypeBuilder.put("BINARY", FileType.fileType_binary);
fileTypeToTagBuilder.put(FileType.fileType_documentation, "DOCUMENTATION");
tagToFileTypeBuilder.put("DOCUMENTATION", FileType.fileType_documentation);
fileTypeToTagBuilder.put(FileType.fileType_image, "IMAGE");
tagToFileTypeBuilder.put("IMAGE", FileType.fileType_image);
fileTypeToTagBuilder.put(FileType.fileType_other, "OTHER");
tagToFileTypeBuilder.put("OTHER", FileType.fileType_other);
fileTypeToTagBuilder.put(FileType.fileType_source, "SOURCE");
tagToFileTypeBuilder.put("SOURCE", FileType.fileType_source);
fileTypeToTagBuilder.put(FileType.fileType_spdx, "SPDX");
tagToFileTypeBuilder.put("SPDX", FileType.fileType_spdx);
fileTypeToTagBuilder.put(FileType.fileType_text, "TEXT");
tagToFileTypeBuilder.put("TEXT", FileType.fileType_text);
fileTypeToTagBuilder.put(FileType.fileType_video, "VIDEO");
tagToFileTypeBuilder.put("VIDEO", FileType.fileType_video);
FILE_TYPE_TO_TAG = fileTypeToTagBuilder.build();
TAG_TO_FILE_TYPE = tagToFileTypeBuilder.build();
}
FileType[] fileTypes = new FileType[0];
Checksum[] checksums;
String[] fileContributors = new String[0];
String noticeText;
DoapProject[] artifactOf = new DoapProject[0];
SpdxFile[] fileDependencies = new SpdxFile[0];
/**
* @param name fileName
* @param comment Comment on the file
* @param annotations annotations for the file
* @param relationships Relationships to this file
* @param licenseConcluded
* @param licenseInfoInFile
* @param copyrightText
* @param licenseComment
* @throws InvalidSPDXAnalysisException
*/
public SpdxFile(String name, String comment, Annotation[] annotations,
Relationship[] relationships, AnyLicenseInfo licenseConcluded,
AnyLicenseInfo[] licenseInfoInFile, String copyrightText,
String licenseComment, FileType[] fileTypes, Checksum[] checksums,
String[] fileContributors, String noticeText, DoapProject[] artifactOf) throws InvalidSPDXAnalysisException {
super(name, comment, annotations, relationships,
licenseConcluded, licenseInfoInFile,
copyrightText, licenseComment);
this.fileTypes = fileTypes;
if (this.fileTypes == null) {
this.fileTypes = new FileType[0];
}
this.checksums = checksums;
if (this.checksums == null) {
this.checksums = new Checksum[0];
}
this.fileContributors = fileContributors;
if (this.fileContributors == null) {
this.fileContributors = new String[0];
}
this.noticeText = noticeText;
this.fileDependencies = new SpdxFile[0];
this.artifactOf = artifactOf;
if (this.artifactOf == null) {
this.artifactOf = new DoapProject[0];
}
}
/**
* @param fileName
* @param fileTypes
* @param sha1Value
* @param licenseConcluded
* @param licenseInfoInFiles
* @param licenseComment
* @param copyrightText
* @param artifactOfs
* @param fileComment
* @throws InvalidSPDXAnalysisException
*/
public SpdxFile(String fileName, FileType[] fileTypes, String sha1Value,
AnyLicenseInfo licenseConcluded, AnyLicenseInfo[] licenseInfoInFiles,
String licenseComment, String copyrightText,
DoapProject[] artifactOfs, String fileComment) throws InvalidSPDXAnalysisException {
this(fileName, fileComment, new Annotation[0], new Relationship[0], licenseConcluded,
licenseInfoInFiles, copyrightText, licenseComment, fileTypes,
new Checksum[] {new Checksum(ChecksumAlgorithm.checksumAlgorithm_sha1, sha1Value) },
new String[0], "", artifactOfs);
}
/* (non-Javadoc)
* @see org.spdx.rdfparser.model.RdfModelObject#getPropertiesFromModel()
*/
@Override
public void getPropertiesFromModel() throws InvalidSPDXAnalysisException {
super.getPropertiesFromModel();
getMyPropertiesFromModel();
}
void getMyPropertiesFromModel() throws InvalidSPDXAnalysisException {
String[] fileTypeUris = findUriPropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_TYPE);
this.fileTypes = urisToFileType(fileTypeUris, false);
this.checksums = findMultipleChecksumPropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_CHECKSUM);
this.fileContributors = findMultiplePropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_CONTRIBUTOR);
this.noticeText = findSinglePropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_NOTICE);
// File dependencies
SpdxElement[] fileDependencyElements = findMultipleElementPropertyValues(
SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_FILE_FILE_DEPENDENCY);
int count = 0;
if (fileDependencyElements != null) {
for (int i = 0; i < fileDependencyElements.length; i++) {
if (fileDependencyElements[i] instanceof SpdxFile) {
count++;
}
}
}
if (count > 0) {
this.fileDependencies = new SpdxFile[count];
int j = 0;
for (int i = 0; i < fileDependencyElements.length; i++) {
if (fileDependencyElements[i] instanceof SpdxFile) {
this.fileDependencies[j++] = (SpdxFile)fileDependencyElements[i];
}
}
} else {
this.fileDependencies = new SpdxFile[0];
}
// ArtifactOfs
this.artifactOf = findMultipleDoapPropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_ARTIFACTOF);
}
/**
* @param modelContainer
* @param node
* @throws InvalidSPDXAnalysisException
*/
public SpdxFile(IModelContainer modelContainer, Node node)
throws InvalidSPDXAnalysisException {
super(modelContainer, node);
getMyPropertiesFromModel();
}
/**
* Finds the resource for an existing file in the model
* @param spdxFile
* @return resource of an SPDX file with the same name and checksum. Null if none found
* @throws InvalidSPDXAnalysisException
*/
static protected Resource findFileResource(IModelContainer modelContainer, SpdxFile spdxFile) throws InvalidSPDXAnalysisException {
// find any matching file names
Model model = modelContainer.getModel();
Node fileNameProperty = model.getProperty(SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_FILE_NAME).asNode();
Triple fileNameMatch = Triple.createMatch(null, fileNameProperty, Node.createLiteral(spdxFile.getName()));
ExtendedIterator<Triple> filenameMatchIter = model.getGraph().find(fileNameMatch);
if (filenameMatchIter.hasNext()) {
Triple fileMatchTriple = filenameMatchIter.next();
Node fileNode = fileMatchTriple.getSubject();
// check the checksum
Node checksumProperty = model.getProperty(SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_FILE_CHECKSUM).asNode();
Triple checksumMatch = Triple.createMatch(fileNode, checksumProperty, null);
ExtendedIterator<Triple> checksumMatchIterator = model.getGraph().find(checksumMatch);
if (checksumMatchIterator.hasNext()) {
Triple checksumMatchTriple = checksumMatchIterator.next();
Checksum cksum = new Checksum(modelContainer, checksumMatchTriple.getObject());
if (cksum.getAlgorithm().equals(ChecksumAlgorithm.checksumAlgorithm_sha1) &&
cksum.getValue().compareToIgnoreCase(spdxFile.getSha1()) == 0) {
return RdfParserHelper.convertToResource(model, fileNode);
}
}
}
// if we get to here, we did not find a match
return null;
}
@Override
public Resource findDuplicateResource(IModelContainer modelContainer, String uri) throws InvalidSPDXAnalysisException {
// see if we want to change what is considered a duplicate
// currently, a file is considered a duplicate if the checksum and filename
// are the same.
return findFileResource(modelContainer, this);
}
/**
* @return the Sha1 checksum value for this file, or a blank string if no
* sha1 checksum has been set
*/
public String getSha1() {
if (this.checksums != null) {
for (int i = 0;i < this.checksums.length; i++) {
if (this.checksums[i].getAlgorithm().equals(ChecksumAlgorithm.checksumAlgorithm_sha1)) {
return this.checksums[i].getValue();
}
}
}
// No sha1 found, return an empty string
return "";
}
/**
* Converts URI's for the different file types to file types
* @param uris
* @param ignoreErrors If true, any URI's that don't correspond to a know file type will not be included. If true, an exception is thrown.
* @return
* @throws InvalidSPDXAnalysisException
*/
private FileType[] urisToFileType(String[] uris, boolean ignoreErrors) throws InvalidSPDXAnalysisException {
List<FileType> retval = Lists.newArrayList();
for (int i = 0; i < uris.length; i++) {
if (uris[i] != null && !uris[i].isEmpty()) {
if (!uris[i].startsWith(SpdxRdfConstants.SPDX_NAMESPACE)) {
throw(new InvalidSPDXAnalysisException("Invalid file type: "+uris[i]));
}
String fileTypeS = uris[i].substring(SpdxRdfConstants.SPDX_NAMESPACE.length());
try {
retval.add(FileType.valueOf(fileTypeS));
} catch (Exception ex) {
logger.error("Invalid file type in the model - "+fileTypeS);
if (!ignoreErrors) {
throw(new InvalidSPDXAnalysisException("Invalid file type: "+uris[i]));
}
}
}
}
return retval.toArray(new FileType[retval.size()]);
}
private String[] fileTypesToUris(FileType[] fileTypes) {
String[] retval = new String[fileTypes.length];
for (int i = 0; i < retval.length; i++) {
retval[i] = SpdxRdfConstants.SPDX_NAMESPACE + fileTypes[i].toString();
}
return retval;
}
@Override
public void populateModel() throws InvalidSPDXAnalysisException {
super.populateModel();
setPropertyUriValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_TYPE,
fileTypesToUris(this.fileTypes));
setPropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_CHECKSUM, this.checksums);
setPropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_CONTRIBUTOR, this.fileContributors);
setPropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_NOTICE, noticeText);
setPropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_ARTIFACTOF, artifactOf);
setPropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_FILE_DEPENDENCY, fileDependencies, false);
}
@Override
protected String getLicenseInfoFromFilesPropertyName() {
return SpdxRdfConstants.PROP_FILE_SEEN_LICENSE;
}
@Override
protected String getNamePropertyName() {
return SpdxRdfConstants.PROP_FILE_NAME;
}
/* (non-Javadoc)
* @see org.spdx.rdfparser.model.RdfModelObject#getType(com.hp.hpl.jena.rdf.model.Model)
*/
@Override
public Resource getType(Model model) {
return model.createResource(SpdxRdfConstants.SPDX_NAMESPACE + SpdxRdfConstants.CLASS_SPDX_FILE);
}
/**
* @return the fileType
*/
public FileType[] getFileTypes() {
if (this.resource != null && this.refreshOnGet) {
String[] fileTypeUris = findUriPropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_TYPE);
try {
this.fileTypes = urisToFileType(fileTypeUris, true);
} catch (InvalidSPDXAnalysisException e) {
// ignore the error
}
}
return fileTypes;
}
/**
* @param fileTypes the fileTypes to set
* @throws InvalidSPDXAnalysisException
*/
public void setFileTypes(FileType[] fileTypes) throws InvalidSPDXAnalysisException {
this.fileTypes = fileTypes;
if (this.fileTypes == null) {
this.fileTypes = new FileType[0];
}
setPropertyUriValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_TYPE,
fileTypesToUris(this.fileTypes));
}
/**
* Add a file type to this file
* @param fileType
* @throws InvalidSPDXAnalysisException
*/
public void addFileType(FileType fileType) throws InvalidSPDXAnalysisException {
if (fileType == null) {
return;
}
this.fileTypes = Arrays.copyOf(this.fileTypes, this.fileTypes.length + 1);
this.fileTypes[this.fileTypes.length-1] = fileType;
addPropertyUriValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_TYPE, fileType.toString());
}
/**
* @return the checksums
*/
public Checksum[] getChecksums() {
if (this.resource != null && this.refreshOnGet) {
try {
Checksum[] refresh = findMultipleChecksumPropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_CHECKSUM);
if (refresh == null || !arraysEquivalent(refresh, this.checksums, true)) {
this.checksums = refresh;
}
} catch (InvalidSPDXAnalysisException e) {
logger.error("Invalid checksum in model");
}
}
return checksums;
}
/**
* @param checksums the checksums to set
* @throws InvalidSPDXAnalysisException
*/
public void setChecksums(Checksum[] checksums) throws InvalidSPDXAnalysisException {
this.checksums = checksums;
setPropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_CHECKSUM, this.checksums);
}
/**
* Add a checksum
* @param checksum
* @throws InvalidSPDXAnalysisException
*/
public void addChecksum(Checksum checksum) throws InvalidSPDXAnalysisException {
if (checksum == null) {
return;
}
this.checksums = Arrays.copyOf(this.checksums, this.checksums.length + 1);
this.checksums[this.checksums.length - 1] = checksum;
addPropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_CHECKSUM, checksum);
}
/**
* @return the fileContributors
*/
public String[] getFileContributors() {
if (this.resource != null && this.refreshOnGet) {
this.fileContributors = findMultiplePropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_CONTRIBUTOR);
}
return fileContributors;
}
/**
* @param fileContributors the fileContributors to set
*/
public void setFileContributors(String[] fileContributors) {
if (fileContributors == null) {
this.fileContributors = new String[0];
} else {
this.fileContributors = fileContributors;
}
setPropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_CONTRIBUTOR, fileContributors);
}
/**
* @return the noticeText
*/
public String getNoticeText() {
if (this.resource != null && this.refreshOnGet) {
this.noticeText = findSinglePropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_NOTICE);
}
return noticeText;
}
/**
* @param noticeText the noticeText to set
*/
public void setNoticeText(String noticeText) {
this.noticeText = noticeText;
setPropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_NOTICE, noticeText);
}
/**
* @return the artifactOf
*/
public DoapProject[] getArtifactOf() {
if (this.resource != null && this.refreshOnGet) {
try {
DoapProject[] refresh = findMultipleDoapPropertyValues(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_ARTIFACTOF);
if (refresh == null || !arraysEquivalent(refresh, this.artifactOf, true)) {
this.artifactOf = refresh;
}
} catch (InvalidSPDXAnalysisException e) {
logger.error("Invalid artifact of in the model");
}
}
return artifactOf;
}
/**
* @param artifactOf the artifactOf to set
* @throws InvalidSPDXAnalysisException
*/
public void setArtifactOf(DoapProject[] artifactOf) throws InvalidSPDXAnalysisException {
if (artifactOf == null) {
this.artifactOf = new DoapProject[0];
} else {
this.artifactOf = artifactOf;
}
setPropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_ARTIFACTOF, this.artifactOf);
}
/**
* This method should no longer be used. The Relationship property should be used in its place.
* @return the fileDependencies
*/
@Deprecated
public SpdxFile[] getFileDependencies() {
if (this.resource != null && this.refreshOnGet) {
try {
SpdxElement[] fileDependencyElements = findMultipleElementPropertyValues(
SpdxRdfConstants.SPDX_NAMESPACE, SpdxRdfConstants.PROP_FILE_FILE_DEPENDENCY);
if (!arraysEquivalent(fileDependencyElements, this.fileDependencies, false)) {
int count = 0;
if (fileDependencyElements != null) {
for (int i = 0; i < fileDependencyElements.length; i++) {
if (fileDependencyElements[i] instanceof SpdxFile) {
count++;
}
}
}
if (count > 0) {
this.fileDependencies = new SpdxFile[count];
int j = 0;
for (int i = 0; i < fileDependencyElements.length; i++) {
if (fileDependencyElements[i] instanceof SpdxFile) {
this.fileDependencies[j++] = (SpdxFile)fileDependencyElements[i];
}
}
}
}
} catch (InvalidSPDXAnalysisException ex) {
logger.error("Error getting file dependencies",ex);
}
}
return fileDependencies;
}
/**
* This method should no longer be used. The Relationship property should be used in its place.
* @param fileDependencies the fileDependencies to set
* @throws InvalidSPDXAnalysisException
*/
@Deprecated
public void setFileDependencies(SpdxFile[] fileDependencies) throws InvalidSPDXAnalysisException {
if (fileDependencies == null) {
this.fileDependencies = new SpdxFile[0];
} else {
this.fileDependencies = fileDependencies;
}
setPropertyValue(SpdxRdfConstants.SPDX_NAMESPACE,
SpdxRdfConstants.PROP_FILE_FILE_DEPENDENCY, this.fileDependencies);
}
@Override
public boolean equivalent(IRdfModel o) {
return this.equivalent(o, true);
}
@Override
public boolean equivalent(IRdfModel o, boolean testRelationships) {
if (o == this) {
return true;
}
if (!(o instanceof SpdxFile)) {
return false;
}
SpdxFile comp = (SpdxFile)o;
if (!super.equivalent(comp, testRelationships)) {
return false;
}
// compare based on properties
// Note: We don't compare the ID's since they may be different if they come
// from different models
return (arraysEquivalent(this.checksums, comp.getChecksums(), testRelationships) &&
RdfModelHelper.arraysEqual(this.fileTypes, comp.getFileTypes())&&
RdfModelHelper.arraysEqual(this.fileContributors, comp.getFileContributors()) &&
arraysEquivalent(this.artifactOf, comp.getArtifactOf(), testRelationships) &&
arraysEquivalent(this.fileDependencies, comp.getFileDependencies(), testRelationships) &&
RdfModelHelper.stringsEquivalent(this.noticeText, comp.getNoticeText()));
}
protected Checksum[] cloneChecksum() {
if (checksums == null) {
return null;
}
Checksum[] retval = new Checksum[checksums.length];
for (int i = 0; i < checksums.length; i++) {
retval[i] = checksums[i].clone();
}
return retval;
}
protected DoapProject[] cloneArtifactOf() {
if (this.artifactOf == null) {
return null;
}
DoapProject[] retval = new DoapProject[this.artifactOf.length];
for (int i = 0; i < this.artifactOf.length; i++) {
retval[i] = artifactOf[i].clone();
}
return retval;
}
public SpdxFile[] cloneFileDependencies(Map<String, SpdxElement> clonedElementIds) {
if (this.fileDependencies == null) {
return null;
}
SpdxFile[] retval = new SpdxFile[this.fileDependencies.length];
for (int i = 0; i < this.fileDependencies.length; i++) {
retval[i] = this.fileDependencies[i].clone(clonedElementIds);
}
return retval;
}
@Override
public SpdxFile clone(Map<String, SpdxElement> clonedElementIds) {
if (clonedElementIds.containsKey(this.getId())) {
return (SpdxFile)clonedElementIds.get(this.getId());
}
SpdxFile retval;
try {
retval = new SpdxFile(name, comment, cloneAnnotations(),
null, cloneLicenseConcluded(),
cloneLicenseInfosFromFiles(), copyrightText,
licenseComments, fileTypes, cloneChecksum(),
fileContributors, noticeText, cloneArtifactOf());
} catch (InvalidSPDXAnalysisException e) {
logger.error("Error cloning file: ",e);
retval = null;
}
clonedElementIds.put(this.getId(), retval);
if(retval != null){
try {
retval.setRelationships(cloneRelationships(clonedElementIds));
} catch (InvalidSPDXAnalysisException e) {
logger.error("Unexected error setting relationships during clone",e);
}
if (this.fileDependencies != null) {
try {
retval.setFileDependencies(cloneFileDependencies(clonedElementIds));
} catch (InvalidSPDXAnalysisException e1) {
logger.warn("Error setting file dependencies on clone", e1);
}
}
}
return retval;
}
@Override
public SpdxFile clone() {
return clone(Maps.<String, SpdxElement>newHashMap());
}
@Override
public List<String> verify() {
List<String> retval = super.verify();
String fileName = this.getName();
if (fileName == null) {
fileName = "UNKNOWN";
}
if (checksums == null || checksums.length == 0) {
retval.add("Missing required checksum for file "+fileName);
} else {
for (int i = 0; i < checksums.length; i++) {
List<String> verify = checksums[i].verify();
addNameToWarnings(verify);
retval.addAll(verify);
}
}
String sha1 = getSha1();
if (sha1 == null || sha1.isEmpty()) {
retval.add("Missing required SHA1 hashcode value for "+name);
}
DoapProject[] projects = this.getArtifactOf();
if (projects != null) {
for (int i = 0;i < projects.length; i++) {
retval.addAll(projects[i].verify());
}
}
// fileDependencies
if (fileDependencies != null) {
for (int i = 0; i < fileDependencies.length; i++) {
List<String> verifyFileDependency = fileDependencies[i].verify();
for (int j = 0; j < verifyFileDependency.size(); j++) {
retval.add("Invalid file dependency for file named "+
fileDependencies[i].getName()+": "+verifyFileDependency.get(j));
}
}
}
return retval;
}
/**
* This method is used for sorting a list of SPDX files
* @param file SPDXFile that is compared
* @return
*/
@Override
public int compareTo(SpdxFile file) {
int retval = this.getName().compareTo(file.getName());
return retval;
}
// the following methods are added as a TEMPORARY convenience to those
// migrating from the 1.2 version of the utilities
/**
* This method should be replaced by the more consistent getCopyrightText
* This method will be removed in a future release
* @return
*/
@Deprecated
public String getCopyright() {
return this.getCopyrightText();
}
/**
* This method should be replaced by the more consistent setCopyrightText
* This method will be removed in a future release
* @param copyright
*/
@Deprecated
public void setCopyright(String copyright) {
this.setCopyrightText(copyright);
}
/**
 * This method should be replaced by the more consistent getLicenseConcluded.
 * This method will be removed in a future release.
 * @return the concluded license for this file
 * @deprecated use {@link #getLicenseConcluded()} instead
 */
@Deprecated
public AnyLicenseInfo getConcludedLicenses() {
    return this.getLicenseConcluded();
}
/**
 * This method should be replaced by the more consistent setLicenseConcluded.
 * This method will be removed in a future release.
 * @param concludedLicense the concluded license to set
 * @throws InvalidSPDXAnalysisException
 * @deprecated use {@link #setLicenseConcluded(AnyLicenseInfo)} instead
 */
@Deprecated
public void setConcludedLicenses(AnyLicenseInfo concludedLicense) throws InvalidSPDXAnalysisException {
    this.setLicenseConcluded(concludedLicense);
}
/**
 * This method should be replaced by the more consistent getFileContributors.
 * This method will be removed in a future release.
 * @return the contributors to this file
 * @deprecated use {@link #getFileContributors()} instead
 */
@Deprecated
public String[] getContributors() {
    return this.getFileContributors();
}
/**
 * This method should be replaced by the more consistent setFileContributors.
 * This method will be removed in a future release.
 * @param contributors the contributors to set
 * @deprecated use {@link #setFileContributors(String[])} instead
 */
@Deprecated
public void setContributors(String[] contributors) {
    this.setFileContributors(contributors);
}
/**
 * This method should be replaced by the more consistent getLicenseInfoFromFiles.
 * This method will be removed in a future release.
 * @return the licenses seen in this file
 * @deprecated use {@link #getLicenseInfoFromFiles()} instead
 */
@Deprecated
public AnyLicenseInfo[] getSeenLicenses() {
    return this.getLicenseInfoFromFiles();
}
/**
 * This method should be replaced by the more consistent setLicenseInfosFromFiles.
 * This method will be removed in a future release.
 * @param seenLicenses the licenses seen in this file to set
 * @throws InvalidSPDXAnalysisException
 * @deprecated use {@link #setLicenseInfosFromFiles(AnyLicenseInfo[])} instead
 */
@Deprecated
public void setSeenLicenses(AnyLicenseInfo[] seenLicenses) throws InvalidSPDXAnalysisException {
    this.setLicenseInfosFromFiles(seenLicenses);
}
}
| |
/*
* Created on 17/04/2005
*
* JRandTest package
*
* Copyright (c) 2005, Zur Aougav, aougav@hotmail.com
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of the JRandTest nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.fasteasytrade.JRandTest.Algo;
/**
* Skipjack algorithm copyright (c) 2005 Zur Aougav.
* <p>
* Hand translation to java from C implementation.
* <p>
* Originally written by Panu Rissanen <bande@lut.fi>1998.06.24 <br>
* optimized by Mark Tillotson <markt@chaos.org.uk>1998.06.25 <br>
* optimized by Paulo Barreto <pbarreto@nw.com.br>1998.06.30 <br>
* gnupg support by Werner Koch <dd9jn@amsat.org>1998.07.02 <br>
*
* @author Zur Aougav
*
*/
public class Skipjack extends Cipher {

    /** True once setupKey has installed a valid key schedule. */
    boolean initialized = false;

    /** Key-dependent lookup tables: tab[i][c] == fTable[c ^ key[i]]. */
    byte[][] tab = new byte[10][256];

    /**
     * The F-table byte permutation (see description of the G-box permutation)
     */
    static final byte[] fTable = { (byte) 0xa3, (byte) 0xd7, (byte) 0x09,
            (byte) 0x83, (byte) 0xf8, (byte) 0x48, (byte) 0xf6, (byte) 0xf4,
            (byte) 0xb3, (byte) 0x21, (byte) 0x15, (byte) 0x78, (byte) 0x99,
            (byte) 0xb1, (byte) 0xaf, (byte) 0xf9, (byte) 0xe7, (byte) 0x2d,
            (byte) 0x4d, (byte) 0x8a, (byte) 0xce, (byte) 0x4c, (byte) 0xca,
            (byte) 0x2e, (byte) 0x52, (byte) 0x95, (byte) 0xd9, (byte) 0x1e,
            (byte) 0x4e, (byte) 0x38, (byte) 0x44, (byte) 0x28, (byte) 0x0a,
            (byte) 0xdf, (byte) 0x02, (byte) 0xa0, (byte) 0x17, (byte) 0xf1,
            (byte) 0x60, (byte) 0x68, (byte) 0x12, (byte) 0xb7, (byte) 0x7a,
            (byte) 0xc3, (byte) 0xe9, (byte) 0xfa, (byte) 0x3d, (byte) 0x53,
            (byte) 0x96, (byte) 0x84, (byte) 0x6b, (byte) 0xba, (byte) 0xf2,
            (byte) 0x63, (byte) 0x9a, (byte) 0x19, (byte) 0x7c, (byte) 0xae,
            (byte) 0xe5, (byte) 0xf5, (byte) 0xf7, (byte) 0x16, (byte) 0x6a,
            (byte) 0xa2, (byte) 0x39, (byte) 0xb6, (byte) 0x7b, (byte) 0x0f,
            (byte) 0xc1, (byte) 0x93, (byte) 0x81, (byte) 0x1b, (byte) 0xee,
            (byte) 0xb4, (byte) 0x1a, (byte) 0xea, (byte) 0xd0, (byte) 0x91,
            (byte) 0x2f, (byte) 0xb8, (byte) 0x55, (byte) 0xb9, (byte) 0xda,
            (byte) 0x85, (byte) 0x3f, (byte) 0x41, (byte) 0xbf, (byte) 0xe0,
            (byte) 0x5a, (byte) 0x58, (byte) 0x80, (byte) 0x5f, (byte) 0x66,
            (byte) 0x0b, (byte) 0xd8, (byte) 0x90, (byte) 0x35, (byte) 0xd5,
            (byte) 0xc0, (byte) 0xa7, (byte) 0x33, (byte) 0x06, (byte) 0x65,
            (byte) 0x69, (byte) 0x45, (byte) 0x00, (byte) 0x94, (byte) 0x56,
            (byte) 0x6d, (byte) 0x98, (byte) 0x9b, (byte) 0x76, (byte) 0x97,
            (byte) 0xfc, (byte) 0xb2, (byte) 0xc2, (byte) 0xb0, (byte) 0xfe,
            (byte) 0xdb, (byte) 0x20, (byte) 0xe1, (byte) 0xeb, (byte) 0xd6,
            (byte) 0xe4, (byte) 0xdd, (byte) 0x47, (byte) 0x4a, (byte) 0x1d,
            (byte) 0x42, (byte) 0xed, (byte) 0x9e, (byte) 0x6e, (byte) 0x49,
            (byte) 0x3c, (byte) 0xcd, (byte) 0x43, (byte) 0x27, (byte) 0xd2,
            (byte) 0x07, (byte) 0xd4, (byte) 0xde, (byte) 0xc7, (byte) 0x67,
            (byte) 0x18, (byte) 0x89, (byte) 0xcb, (byte) 0x30, (byte) 0x1f,
            (byte) 0x8d, (byte) 0xc6, (byte) 0x8f, (byte) 0xaa, (byte) 0xc8,
            (byte) 0x74, (byte) 0xdc, (byte) 0xc9, (byte) 0x5d, (byte) 0x5c,
            (byte) 0x31, (byte) 0xa4, (byte) 0x70, (byte) 0x88, (byte) 0x61,
            (byte) 0x2c, (byte) 0x9f, (byte) 0x0d, (byte) 0x2b, (byte) 0x87,
            (byte) 0x50, (byte) 0x82, (byte) 0x54, (byte) 0x64, (byte) 0x26,
            (byte) 0x7d, (byte) 0x03, (byte) 0x40, (byte) 0x34, (byte) 0x4b,
            (byte) 0x1c, (byte) 0x73, (byte) 0xd1, (byte) 0xc4, (byte) 0xfd,
            (byte) 0x3b, (byte) 0xcc, (byte) 0xfb, (byte) 0x7f, (byte) 0xab,
            (byte) 0xe6, (byte) 0x3e, (byte) 0x5b, (byte) 0xa5, (byte) 0xad,
            (byte) 0x04, (byte) 0x23, (byte) 0x9c, (byte) 0x14, (byte) 0x51,
            (byte) 0x22, (byte) 0xf0, (byte) 0x29, (byte) 0x79, (byte) 0x71,
            (byte) 0x7e, (byte) 0xff, (byte) 0x8c, (byte) 0x0e, (byte) 0xe2,
            (byte) 0x0c, (byte) 0xef, (byte) 0xbc, (byte) 0x72, (byte) 0x75,
            (byte) 0x6f, (byte) 0x37, (byte) 0xa1, (byte) 0xec, (byte) 0xd3,
            (byte) 0x8e, (byte) 0x62, (byte) 0x8b, (byte) 0x86, (byte) 0x10,
            (byte) 0xe8, (byte) 0x08, (byte) 0x77, (byte) 0x11, (byte) 0xbe,
            (byte) 0x92, (byte) 0x4f, (byte) 0x24, (byte) 0xc5, (byte) 0x32,
            (byte) 0x36, (byte) 0x9d, (byte) 0xcf, (byte) 0xf3, (byte) 0xa6,
            (byte) 0xbb, (byte) 0xac, (byte) 0x5e, (byte) 0x6c, (byte) 0xa9,
            (byte) 0x13, (byte) 0x57, (byte) 0x25, (byte) 0xb5, (byte) 0xe3,
            (byte) 0xbd, (byte) 0xa8, (byte) 0x3a, (byte) 0x01, (byte) 0x05,
            (byte) 0x59, (byte) 0x2a, (byte) 0x46 };

    /**
     * Preprocess a user key into a table to save an XOR at each F-table
     * access.
     *
     * @param key
     *            key length must be >= 10 bytes. Only the first 10 bytes
     *            are used.
     * @return false if key is null or shorter than 10 bytes; true otherwise
     */
    boolean setupKey(byte[] key) {
        // Robustness fix: reject null keys instead of throwing NPE.
        if (key == null || key.length < 10) {
            return false;
        }
        /*
         * tab[i][c] = fTable[c ^ key[i]]
         */
        for (int i = 0; i < 10; i++) {
            int k = 0xff & key[i];
            for (int c = 0; c < 256; c++) {
                tab[i][c] = fTable[c ^ k];
            }
        }
        // Mark initialized only after the schedule is fully built
        // (the old code set the flag before building the tables).
        initialized = true;
        return true;
    }

    /**
     * Encrypt a single block of data.
     * <p>
     * In and out blocks' length must be 8 bytes.
     *
     * @return false if the key is not set up, input or output blocks are
     *         null, or length is not = 8. Else, true.
     */
    public boolean encrypt_block(byte[] in, byte[] out) {
        int w1, w2, w3, w4;
        if (!initialized || in == null || out == null || in.length != 8
                || in.length != out.length)
            return false;
        // Load the four big-endian 16-bit words of the block.
        w1 = ((0xff & in[0]) << 8) | (0xff & in[1]);
        w2 = ((0xff & in[2]) << 8) | (0xff & in[3]);
        w3 = ((0xff & in[4]) << 8) | (0xff & in[5]);
        w4 = ((0xff & in[6]) << 8) | (0xff & in[7]);
        /* stepping rule A: */
        w1 = g0(w1);
        w4 ^= w1 ^ 1;
        w4 = g1(w4);
        w3 ^= w4 ^ 2;
        w3 = g2(w3);
        w2 ^= w3 ^ 3;
        w2 = g3(w2);
        w1 ^= w2 ^ 4;
        w1 = g4(w1);
        w4 ^= w1 ^ 5;
        w4 = g0(w4);
        w3 ^= w4 ^ 6;
        w3 = g1(w3);
        w2 ^= w3 ^ 7;
        w2 = g2(w2);
        w1 ^= w2 ^ 8;
        /* stepping rule B: */
        w2 ^= w1 ^ 9;
        w1 = g3(w1);
        w1 ^= w4 ^ 10;
        w4 = g4(w4);
        w4 ^= w3 ^ 11;
        w3 = g0(w3);
        w3 ^= w2 ^ 12;
        w2 = g1(w2);
        w2 ^= w1 ^ 13;
        w1 = g2(w1);
        w1 ^= w4 ^ 14;
        w4 = g3(w4);
        w4 ^= w3 ^ 15;
        w3 = g4(w3);
        w3 ^= w2 ^ 16;
        w2 = g0(w2);
        /* stepping rule A: */
        w1 = g1(w1);
        w4 ^= w1 ^ 17;
        w4 = g2(w4);
        w3 ^= w4 ^ 18;
        w3 = g3(w3);
        w2 ^= w3 ^ 19;
        w2 = g4(w2);
        w1 ^= w2 ^ 20;
        w1 = g0(w1);
        w4 ^= w1 ^ 21;
        w4 = g1(w4);
        w3 ^= w4 ^ 22;
        w3 = g2(w3);
        w2 ^= w3 ^ 23;
        w2 = g3(w2);
        w1 ^= w2 ^ 24;
        /* stepping rule B: */
        w2 ^= w1 ^ 25;
        w1 = g4(w1);
        w1 ^= w4 ^ 26;
        w4 = g0(w4);
        w4 ^= w3 ^ 27;
        w3 = g1(w3);
        w3 ^= w2 ^ 28;
        w2 = g2(w2);
        w2 ^= w1 ^ 29;
        w1 = g3(w1);
        w1 ^= w4 ^ 30;
        w4 = g4(w4);
        w4 ^= w3 ^ 31;
        w3 = g0(w3);
        w3 ^= w2 ^ 32;
        w2 = g1(w2);
        // Store the four 16-bit words back as big-endian bytes.
        out[0] = (byte) (w1 >>> 8);
        out[1] = (byte) w1;
        out[2] = (byte) (w2 >>> 8);
        out[3] = (byte) w2;
        out[4] = (byte) (w3 >>> 8);
        out[5] = (byte) w3;
        out[6] = (byte) (w4 >>> 8);
        out[7] = (byte) w4;
        return true;
    }

    /**
     * Decrypt a single block of data (inverse of encrypt_block).
     * <p>
     * In and out blocks' length must be 8 bytes.
     *
     * @return false if the key is not set up, input or output blocks are
     *         null, or length is not = 8. Else, true.
     */
    public boolean decrypt_block(byte[] in, byte[] out) {
        int w1, w2, w3, w4;
        if (!initialized || in == null || out == null || in.length != 8
                || in.length != out.length)
            return false;
        // Load the four big-endian 16-bit words of the block.
        w1 = ((0xff & in[0]) << 8) | (0xff & in[1]);
        w2 = ((0xff & in[2]) << 8) | (0xff & in[3]);
        w3 = ((0xff & in[4]) << 8) | (0xff & in[5]);
        w4 = ((0xff & in[6]) << 8) | (0xff & in[7]);
        /* stepping rule A: */
        w2 = h1(w2);
        w3 ^= w2 ^ 32;
        w3 = h0(w3);
        w4 ^= w3 ^ 31;
        w4 = h4(w4);
        w1 ^= w4 ^ 30;
        w1 = h3(w1);
        w2 ^= w1 ^ 29;
        w2 = h2(w2);
        w3 ^= w2 ^ 28;
        w3 = h1(w3);
        w4 ^= w3 ^ 27;
        w4 = h0(w4);
        w1 ^= w4 ^ 26;
        w1 = h4(w1);
        w2 ^= w1 ^ 25;
        /* stepping rule B: */
        w1 ^= w2 ^ 24;
        w2 = h3(w2);
        w2 ^= w3 ^ 23;
        w3 = h2(w3);
        w3 ^= w4 ^ 22;
        w4 = h1(w4);
        w4 ^= w1 ^ 21;
        w1 = h0(w1);
        w1 ^= w2 ^ 20;
        w2 = h4(w2);
        w2 ^= w3 ^ 19;
        w3 = h3(w3);
        w3 ^= w4 ^ 18;
        w4 = h2(w4);
        w4 ^= w1 ^ 17;
        w1 = h1(w1);
        /* stepping rule A: */
        w2 = h0(w2);
        w3 ^= w2 ^ 16;
        w3 = h4(w3);
        w4 ^= w3 ^ 15;
        w4 = h3(w4);
        w1 ^= w4 ^ 14;
        w1 = h2(w1);
        w2 ^= w1 ^ 13;
        w2 = h1(w2);
        w3 ^= w2 ^ 12;
        w3 = h0(w3);
        w4 ^= w3 ^ 11;
        w4 = h4(w4);
        w1 ^= w4 ^ 10;
        w1 = h3(w1);
        w2 ^= w1 ^ 9;
        /* stepping rule B: */
        w1 ^= w2 ^ 8;
        w2 = h2(w2);
        w2 ^= w3 ^ 7;
        w3 = h1(w3);
        w3 ^= w4 ^ 6;
        w4 = h0(w4);
        w4 ^= w1 ^ 5;
        w1 = h4(w1);
        w1 ^= w2 ^ 4;
        w2 = h3(w2);
        w2 ^= w3 ^ 3;
        w3 = h2(w3);
        w3 ^= w4 ^ 2;
        w4 = h1(w4);
        w4 ^= w1 ^ 1;
        w1 = h0(w1);
        // Store the four 16-bit words back as big-endian bytes.
        out[0] = (byte) (w1 >>> 8);
        out[1] = (byte) w1;
        out[2] = (byte) (w2 >>> 8);
        out[3] = (byte) w2;
        out[4] = (byte) (w3 >>> 8);
        out[5] = (byte) w3;
        out[6] = (byte) (w4 >>> 8);
        out[7] = (byte) w4;
        return true;
    }

    /**
     * The key-dependent permutation G on V^16 is a four-round Feistel network.
     * The round function is a fixed byte-substitution table (permutation on
     * V^8), the F-table. Each round of G incorporates a single byte from the
     * key (already folded into tab by setupKey).
     */
    int g(int w, int i, int j, int k, int l) {
        w ^= 0xffff & (tab[i][w & 0xff] << 8);
        w ^= 0xff & tab[j][w >>> 8];
        w ^= 0xffff & (tab[k][w & 0xff] << 8);
        w ^= 0xff & tab[l][w >>> 8];
        return w;
    }

    // g0..g4 apply G with the five rotations of the 10-byte key schedule.
    int g0(int w) {
        return g(w, 0, 1, 2, 3);
    }

    int g1(int w) {
        return g(w, 4, 5, 6, 7);
    }

    int g2(int w) {
        return g(w, 8, 9, 0, 1);
    }

    int g3(int w) {
        return g(w, 2, 3, 4, 5);
    }

    int g4(int w) {
        return g(w, 6, 7, 8, 9);
    }

    /**
     * The inverse of the G permutation (rounds applied in reverse order).
     */
    int h(int w, int i, int j, int k, int l) {
        w ^= 0xff & tab[l][w >>> 8];
        w ^= 0xffff & (tab[k][w & 0xff] << 8);
        w ^= 0xff & tab[j][w >>> 8];
        w ^= 0xffff & (tab[i][w & 0xff] << 8);
        return w;
    }

    // h0..h4 are the inverses of g0..g4 with the same key-byte indices.
    int h0(int w) {
        return h(w, 0, 1, 2, 3);
    }

    int h1(int w) {
        return h(w, 4, 5, 6, 7);
    }

    int h2(int w) {
        return h(w, 8, 9, 0, 1);
    }

    int h3(int w) {
        return h(w, 2, 3, 4, 5);
    }

    int h4(int w) {
        return h(w, 6, 7, 8, 9);
    }

    /**
     * Formats a single byte as two uppercase hexadecimal characters.
     * @param b byte to format
     * @return two-character hex string, e.g. 0x1f -> "1F"
     */
    public static String byte2hex(byte b) {
        final String hex = "0123456789ABCDEF";
        return "" + hex.charAt((0xf0 & b) >>> 4) + hex.charAt(0x0f & b);
    }

    /**
     * Self-test using the published Skipjack known-answer vector; prints
     * the computed and expected ciphertext and whether encryption and
     * decryption round-trip correctly.
     */
    public void Test() {
        byte[] inp = { (byte) 0x33, (byte) 0x22, (byte) 0x11, (byte) 0x00,
                (byte) 0xdd, (byte) 0xcc, (byte) 0xbb, (byte) 0xaa };
        byte[] Key = { (byte) 0x00, (byte) 0x99, (byte) 0x88, (byte) 0x77,
                (byte) 0x66, (byte) 0x55, (byte) 0x44, (byte) 0x33,
                (byte) 0x22, (byte) 0x11 };
        byte[] enc = new byte[8];
        byte[] dec = new byte[8];
        // Expected ciphertext for the vector above.
        byte[] chk = { (byte) 0x25, (byte) 0x87, (byte) 0xca, (byte) 0xe2,
                (byte) 0x7a, (byte) 0x12, (byte) 0xd3, (byte) 0x00 };
        if (!setupKey(Key)) {
            System.out.println("Error: unable to set key");
            return;
        }
        encrypt_block(inp, enc);
        System.out.print("enc=");
        for (int i = 0; i < enc.length; i++)
            System.out.print(",0x" + byte2hex(enc[i]));
        System.out.println();
        System.out.print("chk=");
        for (int i = 0; i < chk.length; i++)
            System.out.print(",0x" + byte2hex(chk[i]));
        System.out.println();
        if (compareBytes(enc, chk))
            System.out.println("Skipjack test encryption is OK");
        else
            System.out.println("Skipjack test encryption failed");
        decrypt_block(enc, dec);
        if (compareBytes(dec, inp))
            System.out.println("Skipjack test decryption is OK");
        else
            System.out.println("Skipjack test decryption failed");
    }

    public static void main(String[] args) {
        Skipjack algo = new Skipjack();
        algo.Test();
    }
}
| |
package com.nobullet.math.expression;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
/**
* Reverse polish notation.
*/
public class ReversePolishNotation {
private final List<ExpressionPart> reverseNotation;
public ReversePolishNotation() {
reverseNotation = new LinkedList<>();
}
public void add(ExpressionPart part) {
reverseNotation.add(part);
}
public List<ExpressionPart> getResult() {
return reverseNotation;
}
@Override
public String toString() {
return reverseNotation.toString();
}
/**
* Tries to remove constant expressions like (5 * 6 - x) => (30 - x)
*
* @param mc Math context.
* @return Normalized reverse Polish notation.
*/
public ReversePolishNotation normalize(MathContext mc) {
Deque<ExpressionPart> optimized = new ArrayDeque<>(reverseNotation.size());
for (ExpressionPart pt : reverseNotation) {
optimized.push(pt);
if (pt instanceof Operation && optimized.size() >= 3) {
Operation top = (Operation) pt;
if (top.hasArithmeticSign()) {
optimized.pop();
ExpressionPart beforeTop = optimized.pop();
ExpressionPart beforeBeforeTop = optimized.pop();
if (beforeTop instanceof Operand && beforeBeforeTop instanceof Operand) {
Operand newOperand = top.apply(mc, (Operand) beforeBeforeTop, (Operand) beforeTop);
optimized.push(newOperand);
} else {
optimized.push(beforeBeforeTop);
optimized.push(beforeTop);
optimized.push(top);
}
}
}
}
reverseNotation.clear();
while (!optimized.isEmpty()) {
reverseNotation.add(optimized.removeLast());
}
return this;
}
/**
* Expression parser.
*/
public static class Parser {
private final String expression;
private final StringBuilder numberBuilder;
private final StringBuilder nameBuilder;
private final ReversePolishNotation result;
/**
* Parses given expression.
*
* @param expression Expression as string.
*/
public Parser(String expression) {
this.expression = expression;
this.numberBuilder = new StringBuilder();
this.nameBuilder = new StringBuilder();
this.result = new ReversePolishNotation();
}
private void processDot(char c) {
if (numberBuilder.length() == 0) {
numberBuilder.append('0');
}
numberBuilder.append(c);
}
private void processLeftBrace(Deque<ExpressionPart> tempStack) {
if (nameBuilder.length() != 0) {
if (processFunction(tempStack)) {
result.add(EndOfFunctionArguments.INSTANCE);
}
}
tempStack.push(LeftBrace.INSTANCE);
}
private void processOperation(Deque<ExpressionPart> tempStack, Operation op) {
processNumber();
processVariable();
ExpressionPart p;
while (((p = tempStack.peek()) instanceof Operation)
&& (!op.isRightAssociated() && op.getPriority() <= ((Operation) p).getPriority()
|| op.isRightAssociated() && op.getPriority() < ((Operation) p).getPriority())) {
result.add(tempStack.pop());
}
tempStack.push(op);
}
private void processRightBrace(Deque<ExpressionPart> tempStack) {
processNumber();
processVariable();
ExpressionPart p;
boolean foundMatching = false;
while (!tempStack.isEmpty() && !((p = tempStack.pop()) == LeftBrace.INSTANCE && (foundMatching = true))) {
result.add(p);
}
if (!foundMatching) {
throw new IllegalStateException("The expression " + expression + " has unmatched braces.");
}
if (tempStack.peek() instanceof Function) {
result.add(tempStack.pop());
}
}
private void processComma(Deque<ExpressionPart> tempStack) {
processNumber();
processVariable();
ExpressionPart p;
boolean foundMatching = false;
while (!tempStack.isEmpty() && !((p = tempStack.pop()) == LeftBrace.INSTANCE && (foundMatching = true))) {
result.add(p);
}
if (!foundMatching) {
throw new IllegalStateException("The expression " + expression + " has unmatched braces.");
} else {
tempStack.push(LeftBrace.INSTANCE);
}
}
private boolean processVariable() {
if (nameBuilder.length() == 0) {
return false;
}
result.add(new Variable(nameBuilder.toString()));
nameBuilder.delete(0, nameBuilder.length());
return true;
}
private boolean processFunction(Deque<ExpressionPart> tempStack) {
if (nameBuilder.length() == 0) {
return false;
}
String funcName = nameBuilder.toString();
Operation func = OperationFactory.forName(funcName);
if (func != null) {
tempStack.push(func);
} else {
throw new IllegalStateException("Can't find function " + funcName + ".");
}
nameBuilder.delete(0, nameBuilder.length());
return true;
}
private boolean processNumber() {
if (numberBuilder.length() == 0) {
return false;
}
// ??? what's that?
if (numberBuilder.charAt(numberBuilder.length() - 1) == '.') { // if number ends with '.' ...
result.add(new Operand(BigDecimal.ZERO));
numberBuilder.delete(0, numberBuilder.length());
return true;
}
result.add(new Operand(BigDecimal.valueOf(Double.valueOf(numberBuilder.toString()))));
numberBuilder.delete(0, numberBuilder.length());
return true;
}
/**
* Constructs reverse polish notation.
*
* @return Reverse polish notation for parsed expression.
*/
public ReversePolishNotation toNotation() {
Deque<ExpressionPart> tempStack = new ArrayDeque<>();
int i = 0;
int l = expression.length();
boolean isName = false;
while (i < l) {
char c = expression.charAt(i);
if (Character.isLetter(c) || c == '_') {
nameBuilder.append(c);
isName = true;
} else if (Character.isDigit(c)) {
if (isName) {
nameBuilder.append(c);
} else {
numberBuilder.append(c);
}
} else if (c == '(') {
isName = false;
processLeftBrace(tempStack);
} else if (c == ')') {
isName = false;
processRightBrace(tempStack);
} else if (c == '.') {
isName = false;
processDot(c);
} else if (c == ',') {
isName = false;
processComma(tempStack);
} else {
isName = false;
if (c == '-' && numberBuilder.length() == 0 && nameBuilder.length() == 0) {
// Unary minus.
result.add(new Operand(BigDecimal.ZERO));
}
Operation op = OperationFactory.forName("" + c);
if (op != null) {
processOperation(tempStack, op);
}
}
i++;
}
processNumber();
processVariable();
processFunction(tempStack);
while (!tempStack.isEmpty()) {
result.add(tempStack.pop());
}
if (result.reverseNotation.contains(LeftBrace.INSTANCE)) {
throw new IllegalStateException("The expression " + expression + " has unmatched braces.");
}
return result;
}
}
}
| |
package com.pacoapp.paco.ui;
import java.util.List;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.pacoapp.paco.R;
import com.pacoapp.paco.model.Event;
import com.pacoapp.paco.model.Experiment;
import com.pacoapp.paco.model.ExperimentProviderUtil;
import com.pacoapp.paco.model.Output;
import com.pacoapp.paco.net.SyncService;
import com.pacoapp.paco.sensors.android.BroadcastTriggerReceiver;
import com.pacoapp.paco.shared.model2.ActionTrigger;
import com.pacoapp.paco.shared.model2.ExperimentGroup;
import com.pacoapp.paco.shared.model2.Schedule;
import com.pacoapp.paco.shared.model2.ScheduleTrigger;
import com.pacoapp.paco.shared.util.ExperimentHelper;
import com.pacoapp.paco.shared.util.SchedulePrinter;
import com.pacoapp.paco.triggering.AndroidEsmSignalStore;
import com.pacoapp.paco.triggering.BeeperService;
import com.pacoapp.paco.utils.IntentExtraHelper;
import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.NavUtils;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarActivity;
import android.telephony.TelephonyManager;
import android.view.Display;
import android.view.MenuItem;
import android.widget.Toast;
/**
* An activity representing a list of Schedules. This activity has different
* presentations for handset and tablet-size devices. On handsets, the activity
* presents a list of items, which when touched, lead to a
* {@link ScheduleDetailActivity} representing item details. On tablets, the
* activity presents the list of items and item details side-by-side using two
* vertical panes.
* <p>
* The activity makes heavy use of fragments. The list of items is a
* {@link ScheduleListFragment} and the item details (if present) is a
* {@link ScheduleDetailFragment}.
* <p>
* This activity also implements the required
* {@link ScheduleListFragment.Callbacks} interface to listen for item
* selections.
*/
public class ScheduleListActivity extends ActionBarActivity implements ScheduleListFragment.Callbacks,
                                                           ExperimentLoadingActivity {

  private static Logger Log = LoggerFactory.getLogger(ScheduleListActivity.class);

  /** Request code used when launching ScheduleDetailActivity in single-pane mode. */
  private static final int SCHEDULE_DETAIL_REQUEST = 998;

  private Experiment experiment;
  private ExperimentProviderUtil experimentProviderUtil;
  /** True when this activity was launched from the informed consent page. */
  private boolean fromInformedConsentPage;
  private ExperimentGroup experimentGroup;

  /**
   * Whether or not the activity is in two-pane mode, i.e. running on a tablet
   * device.
   */
  private boolean twoPane;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    Log.debug("ScheduleListActivity onCreate");
    experimentProviderUtil = new ExperimentProviderUtil(this);
    fromInformedConsentPage = getIntent().getExtras() != null ? getIntent().getExtras()
                                                                           .getBoolean(InformedConsentActivity.INFORMED_CONSENT_PAGE_EXTRA_KEY)
                                                             : false;
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    ActionBar actionBar = getSupportActionBar();
    actionBar.setLogo(R.drawable.ic_launcher);
    actionBar.setDisplayUseLogoEnabled(true);
    actionBar.setDisplayShowHomeEnabled(true);
    actionBar.setBackgroundDrawable(new ColorDrawable(0xff4A53B3));
    IntentExtraHelper.loadExperimentInfoFromIntent(this, getIntent(), experimentProviderUtil);
    if (!getUserEditableFromIntent()) {
      // The schedule is not user-editable: persist it and close immediately.
      save();
      finish();
    } else {
      if (experiment == null) {
        Toast.makeText(this, R.string.cannot_find_the_experiment_warning, Toast.LENGTH_SHORT).show();
        finish();
      } else {
        setupScheduleSaving();
      }
    }
    setContentView(R.layout.activity_schedule_list);
    if (findViewById(R.id.schedule_detail_container) != null) {
      // The detail container view will be present only in the
      // large-screen layouts (res/values-large and
      // res/values-sw600dp). If this view is present, then the
      // activity should be in two-pane mode.
      twoPane = true;
      // In two-pane mode, list items should be given the
      // 'activated' state when touched.
      ScheduleListFragment scheduleListFragment = (ScheduleListFragment) getSupportFragmentManager().findFragmentById(R.id.schedule_list);
      scheduleListFragment.setActivateOnItemClick(true);
    }
  }

  /**
   * Callback method from {@link ScheduleListFragment.Callbacks} indicating that
   * the item with the given ID was selected.
   */
  @Override
  public void onItemSelected(ScheduleBundle chosenSchedule) {
    if (twoPane) {
      // In two-pane mode, show the detail view in this activity by
      // adding or replacing the detail fragment using a
      // fragment transaction.
      Bundle arguments = new Bundle();
      arguments.putLong(Experiment.EXPERIMENT_SERVER_ID_EXTRA_KEY, experiment.getExperimentDAO().getId());
      arguments.putString(Experiment.EXPERIMENT_GROUP_NAME_EXTRA_KEY, chosenSchedule.group.getName());
      arguments.putLong(ScheduleDetailFragment.SCHEDULE_TRIGGER_ID, chosenSchedule.trigger.getId());
      arguments.putLong(ScheduleDetailFragment.SCHEDULE_ID, chosenSchedule.schedule.getId());
      ScheduleDetailFragment fragment = new ScheduleDetailFragment();
      fragment.setArguments(arguments);
      getSupportFragmentManager().beginTransaction().replace(R.id.schedule_detail_container, fragment).commit();
    } else {
      // In single-pane mode, simply start the detail activity
      // for the selected item ID.
      Intent detailIntent = new Intent(this, ScheduleDetailActivity.class);
      detailIntent.putExtra(Experiment.EXPERIMENT_SERVER_ID_EXTRA_KEY, experiment.getExperimentDAO().getId());
      detailIntent.putExtra(Experiment.EXPERIMENT_GROUP_NAME_EXTRA_KEY, chosenSchedule.group.getName());
      detailIntent.putExtra(ScheduleDetailFragment.SCHEDULE_TRIGGER_ID, chosenSchedule.trigger.getId());
      detailIntent.putExtra(ScheduleDetailFragment.SCHEDULE_ID, chosenSchedule.schedule.getId());
      startActivityForResult(detailIntent, SCHEDULE_DETAIL_REQUEST);
    }
  }

  @Override
  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    // Persist the schedule once the detail screen returns successfully.
    if (requestCode == SCHEDULE_DETAIL_REQUEST && resultCode != RESULT_CANCELED) {
      save();
    }
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    if (id == android.R.id.home) {
      // See the Navigation pattern on Android Design:
      // http://developer.android.com/design/patterns/navigation.html#up-vs-back
      final Intent intent = new Intent(this, MyExperimentsActivity.class);
      NavUtils.navigateUpTo(this, intent);
      return true;
    }
    return super.onOptionsItemSelected(item);
  }

  @Override
  public void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
  }

  /** Reads the user-editable flag from the launching intent; defaults to true. */
  private boolean getUserEditableFromIntent() {
    if (getIntent().getExtras() != null) {
      return getIntent().getBooleanExtra(ScheduleDetailFragment.USER_EDITABLE_SCHEDULE, true);
    }
    return true;
  }

  /** Saves immediately when no schedule is editable by the user. */
  private void setupScheduleSaving() {
    if (!userCanEditAtLeastOneSchedule()) {
      save();
    } else {
      // setupSaveButton();
    }
  }

  /**
   * Determines whether the user may edit at least one schedule of the
   * current experiment. Schedules marked "only editable on join" count
   * only when this activity was reached from the informed consent page.
   * (Return type narrowed from the needlessly boxed Boolean to boolean.)
   */
  private boolean userCanEditAtLeastOneSchedule() {
    List<ExperimentGroup> groups = experiment.getExperimentDAO().getGroups();
    for (ExperimentGroup experimentGroup : groups) {
      List<ActionTrigger> actionTriggers = experimentGroup.getActionTriggers();
      for (ActionTrigger actionTrigger : actionTriggers) {
        if (actionTrigger instanceof ScheduleTrigger) {
          ScheduleTrigger scheduleTrigger = (ScheduleTrigger)actionTrigger;
          List<Schedule> schedules = scheduleTrigger.getSchedules();
          for (Schedule schedule : schedules) {
            if (schedule.getUserEditable()) {
              boolean userCanOnlyEditOnJoin = schedule.getOnlyEditableOnJoin();
              // Simplified from (!a || (a && b)) to the equivalent (!a || b).
              if (!userCanOnlyEditOnJoin || fromInformedConsentPage) {
                return true;
              }
            }
          }
        }
      }
    }
    return false;
  }

  /**
   * Persists the joined experiment: clears pending ESM signals (when any
   * ESM schedule exists) and stale notifications, then updates the store.
   */
  private void saveExperimentRegistration() {
    Log.debug("saveExperimentRegistration");
    boolean hasEsm = false;
    for (ExperimentGroup experimentGroup : experiment.getExperimentDAO().getGroups()) {
      List<ActionTrigger> actionTriggers = experimentGroup.getActionTriggers();
      for (ActionTrigger actionTrigger : actionTriggers) {
        if (actionTrigger instanceof ScheduleTrigger) {
          ScheduleTrigger scheduleTrigger = (ScheduleTrigger) actionTrigger;
          for (Schedule schedule : scheduleTrigger.getSchedules()) {
            if (schedule != null && schedule.getScheduleType().equals(Schedule.ESM)) {
              hasEsm = true;
            }
          }
        }
      }
    }
    if (hasEsm) {
      AndroidEsmSignalStore alarmStore = new AndroidEsmSignalStore(this);
      alarmStore.deleteAllSignalsForSurvey(experiment.getExperimentDAO().getId());
    }
    experimentProviderUtil.deleteNotificationsForExperiment(experiment.getId());
    experimentProviderUtil.updateJoinedExperiment(experiment);
  }

  /**
   * Creates a paco Event recording the join of a newly registered experiment,
   * including the schedule and (when permitted) phone details.
   */
  private void createJoinEvent() {
    Event event = new Event();
    event.setExperimentId(experiment.getId());
    event.setServerExperimentId(experiment.getServerId());
    event.setExperimentName(experiment.getExperimentDAO().getTitle());
    event.setExperimentGroupName(null);
    event.setActionTriggerId(null);
    event.setActionTriggerSpecId(null);
    event.setActionId(null);
    event.setExperimentVersion(experiment.getExperimentDAO().getVersion());
    event.setResponseTime(new DateTime());
    //event.addResponse(createOutput("joined", "true"));
    event.addResponse(createOutput("schedule", SchedulePrinter.createStringOfAllSchedules(experiment.getExperimentDAO())));
    if (experiment.getExperimentDAO().getRecordPhoneDetails()) {
      Display defaultDisplay = getWindowManager().getDefaultDisplay();
      String size = Integer.toString(defaultDisplay.getHeight()) + "x" + Integer.toString(defaultDisplay.getWidth());
      event.addResponse(createOutput("display", size));
      event.addResponse(createOutput("make", Build.MANUFACTURER));
      event.addResponse(createOutput("model", Build.MODEL));
      event.addResponse(createOutput("android", Build.VERSION.RELEASE));
      TelephonyManager manager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
      String carrierName = manager.getNetworkOperatorName();
      event.addResponse(createOutput("carrier", carrierName));
    }
    experimentProviderUtil.insertEvent(event);
  }

  /** Builds a named Output holding the given answer. */
  private Output createOutput(String key, String answer) {
    Output responseForInput = new Output();
    responseForInput.setAnswer(answer);
    responseForInput.setName(key);
    return responseForInput;
  }

  /** Persists the schedule, or just reports success when nothing is editable. */
  private void save() {
    Log.debug("save");
    if (!userCanEditAtLeastOneSchedule()) {
      setResult(FindExperimentsActivity.JOINED_EXPERIMENT);
      finish();
    } else {
      scheduleExperiment();
      Toast.makeText(this, getString(R.string.success), Toast.LENGTH_LONG).show();
    }
  }

  // Visible for testing
  public void scheduleExperiment() {
    saveExperimentRegistration();
    createJoinEvent();
    startService(new Intent(this, SyncService.class));
    setResult(FindExperimentsActivity.JOINED_EXPERIMENT);
    startService(new Intent(ScheduleListActivity.this, BeeperService.class));
    if (ExperimentHelper.shouldWatchProcesses(experiment.getExperimentDAO())) {
      BroadcastTriggerReceiver.initPollingAndLoggingPreference(this);
      BroadcastTriggerReceiver.startProcessService(this);
    }
    finish();
  }

  // Visible for testing
  public Experiment getExperiment() {
    return experiment;
  }

  @Override
  public void setExperiment(Experiment experimentByServerId) {
    this.experiment = experimentByServerId;
  }

  @Override
  public void setExperimentGroup(ExperimentGroup groupByName) {
    this.experimentGroup = groupByName;
  }

  @Override
  public void saveExperiment() {
    save();
  }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package hydra.gemfirexd;
import com.gemstone.gemfire.LogWriter;
import com.gemstone.gemfire.distributed.DistributedSystem;
import com.gemstone.gemfire.distributed.Locator;
import com.gemstone.gemfire.distributed.internal.DistributionConfig;
import com.pivotal.gemfirexd.FabricLocator;
import com.pivotal.gemfirexd.FabricServer;
import com.pivotal.gemfirexd.FabricService;
import com.pivotal.gemfirexd.FabricService.State;
import com.pivotal.gemfirexd.FabricServiceManager;
import com.pivotal.gemfirexd.internal.iapi.reference.Property;
import hydra.BasePrms;
import hydra.DistributedSystemHelper;
import hydra.EnvHelper;
import hydra.FileUtil;
import hydra.HostDescription;
import hydra.HostHelper;
import hydra.HostHelper.OSType;
import hydra.HydraInternalException;
import hydra.HydraRuntimeException;
import hydra.Log;
import hydra.MasterController;
import hydra.PortHelper;
import hydra.ProcessMgr;
import hydra.RemoteTestModule;
import hydra.TestConfig;
import hydra.blackboard.SharedCounters;
import java.io.File;
import java.io.Serializable;
import java.sql.SQLException;
import java.util.*;
/**
* Helps hydra clients manage fabric servers and stand-alone locators.
* Methods are thread-safe.
*/
public class FabricServerHelper {
/** Description used to start the current fabric server; cached by
 *  {@link #getFabricServerDescription} and cleared on server stop. */
protected static FabricServerDescription TheFabricServerDescription;
/** Properties used to start the current fabric server; used to detect
 *  attempts to reconfigure an already-running server. */
private static Properties TheFabricServerProperties;
/** Perpetual endpoint for the locator (if any); cached by findEndpoint. */
private static Endpoint TheLocatorEndpoint;
// Shared hydra log writer used by all helper methods.
private static LogWriter log = Log.getLogWriter();
//------------------------------------------------------------------------------
// FabricServer
//------------------------------------------------------------------------------
/**
 * Starts a fabric server using boot properties from the {@link
 * FabricServerDescription} to which this VM is wired using {@link
 * FabricServerPrms#clientNames}. Returns the existing fabric server
 * if it is already started.
 *
 * @return the started (or already running) fabric server.
 * @throws HydraRuntimeException if an attempt is made to reconfigure an
 *         existing fabric server.
 */
public static synchronized FabricServer startFabricServer() {
  log.info("Starting the fabric server");
  // delegate to the property-taking overload with this VM's wired config
  return startFabricServer(getBootProperties());
}
/**
 * Starts a fabric server using the given boot properties, typically
 * modified properties obtained using {@link #getBootProperties} or {@link
 * #getBootProperties(String)}. Returns the existing fabric server if it
 * is already started.
 *
 * @param bootProperties boot properties for the server; cached in
 *        {@code TheFabricServerProperties} after a successful start.
 * @return the running fabric server instance.
 * @throws HydraRuntimeException if an attempt is made to reconfigure an
 * existing fabric server.
 */
public static synchronized FabricServer startFabricServer(
    Properties bootProperties) {
  log.info("Starting the fabric server: " + prettyprint(bootProperties));
  FabricServer fs = FabricServiceManager.getFabricServerInstance();
  FabricService.State status = fs.status();
  switch (status) {
    // not started yet (or previously stopped): boot and verify RUNNING
    case UNINITIALIZED:
    case STOPPED:
      try {
        fs.start(bootProperties);
      } catch (SQLException e) {
        String s = "Unable to start fabric server";
        throw new HydraRuntimeException(s, e);
      }
      // re-fetch the instance and confirm the start took effect
      fs = FabricServiceManager.getFabricServerInstance();
      FabricService.State statusNow = fs.status();
      if (statusNow != FabricService.State.RUNNING) {
        String s = "Expected fabric server to be RUNNING, but it is: "
            + statusNow;
        throw new HydraRuntimeException(s);
      }
      log.info("Started the fabric server");
      // save the boot properties for future reference
      TheFabricServerProperties = bootProperties;
      break;
    case RUNNING:
      if (TheFabricServerProperties == null) {
        // block attempt to start fabric server in multiple ways
        String s = "Fabric server was already started without"
            + " FabricServerHelper using an unknown, and possibly"
            + " different, configuration";
        throw new HydraRuntimeException(s);
      } else {
        if (!TheFabricServerProperties.equals(bootProperties)) {
          // block attempt to reconnect fabric server with clashing properties
          String s = "Fabric server already exists using boot properties "
              + TheFabricServerProperties + ", cannot also use "
              + bootProperties;
          throw new HydraRuntimeException(s);
        }
      } // else it already uses these properties, which is fine
      log.info("Already started the fabric server");
      break;
    // transitional or unexpected states are treated as errors
    case STARTING:
    case STOPPING:
    case WAITING:
    default:
      throw new HydraRuntimeException("Unexpected state: " + status);
  }
  return fs;
}
/**
 * Returns the fabric server instance, regardless of its state.
 * Thin accessor around {@link FabricServiceManager#getFabricServerInstance}.
 */
public static synchronized FabricServer getFabricServer() {
  return FabricServiceManager.getFabricServerInstance();
}
/**
 * Stops the fabric server if it is running, using shutdown properties from
 * the {@link FabricServerDescription} to which this VM is wired using {@link
 * FabricServerPrms#clientNames}.
 *
 * @throws HydraRuntimeException if there is a problem stopping the server.
 */
public static synchronized void stopFabricServer() {
  log.info("Stopping the fabric server");
  // delegate to the property-taking overload with this VM's wired config
  stopFabricServer(getShutdownProperties());
}
/**
 * Stops the fabric server if it is running, using the given shutdown
 * properties, typically modified properties obtained using {@link
 * #getShutdownProperties} or {@link #getShutdownProperties(String)}.
 *
 * @param shutdownProperties shutdown properties passed to {@code fs.stop}.
 * @throws HydraRuntimeException if there is a problem stopping the server.
 */
public static synchronized void stopFabricServer(
    Properties shutdownProperties) {
  log.info("Stopping the fabric server: " + prettyprint(shutdownProperties));
  FabricService fs = FabricServiceManager.currentFabricServiceInstance();
  if (fs == null) {
    // no service instance was ever created in this VM
    log.info("Fabric server already stopped.");
    return;
  }
  FabricService.State status = fs.status();
  switch (status) {
    case UNINITIALIZED:
    case STOPPED:
      log.info("Fabric server already stopped.");
      break;
    case RUNNING:
      try {
        fs.stop(shutdownProperties);
      } catch (SQLException e) {
        String s = "Unable to stop fabric server";
        throw new HydraRuntimeException(s, e);
      }
      // verify the stop took effect before clearing the cached config
      FabricService.State statusNow = fs.status();
      if (statusNow != FabricService.State.STOPPED) {
        String s = "Expected fabric server to be STOPPED, but it is: "
            + statusNow;
        throw new HydraRuntimeException(s);
      }
      log.info("Stopped the fabric server");
      // allow the next start to have a different config
      TheFabricServerDescription = null;
      TheFabricServerProperties = null;
      break;
    // transitional or unexpected states are treated as errors
    case STARTING:
    case STOPPING:
    case WAITING:
    default:
      throw new HydraRuntimeException("Unexpected state: " + status);
  }
}
/**
 * Issues "gfxd shut-down-all" to a locator in the
 * distributed system of this JVM. Waits the given number of seconds for
 * the shut-down-all command to complete.
 *
 * @param shutDownAllCmdWaitSec seconds to wait for the command to complete.
 * @throws HydraRuntimeException if no locator is found.
 */
public static synchronized void shutDownAllFabricServers(
    int shutDownAllCmdWaitSec) {
  log.info("Issuing shut-down-all command");
  List<Endpoint> locators = getSystemEndpoints();
  if (locators.isEmpty()) {
    throw new HydraRuntimeException("No locators found");
  }
  // target the first known locator of this JVM's distributed system
  Endpoint target = locators.get(0);
  String command = getGFXDCommand() + "shut-down-all -locators=" + target.getId();
  String output = ProcessMgr.fgexec(command, shutDownAllCmdWaitSec);
  log.info("Issued shut-down-all command:\n" + output);
}
/**
 * Issues "gfxd shut-down-all" to a locator in the specified
 * distributed system. Waits the given number of seconds for
 * the shut-down-all command to complete.
 *
 * @param dsName the distributed system whose locator receives the command.
 * @param shutDownAllCmdWaitSec seconds to wait for the command to complete.
 * @throws HydraRuntimeException if no locator is found.
 */
public static synchronized void shutDownAllFabricServers(
    String dsName, int shutDownAllCmdWaitSec) {
  log.info("Issuing shut-down-all command to " + dsName);
  List<Endpoint> locators = getEndpoints(dsName);
  if (locators.isEmpty()) {
    throw new HydraRuntimeException("No locators found");
  }
  // target the first known locator of the requested distributed system
  Endpoint target = locators.get(0);
  String command = getGFXDCommand() + "shut-down-all -locators=" + target.getId();
  String output = ProcessMgr.fgexec(command, shutDownAllCmdWaitSec);
  log.info("Issued shut-down-all command:\n" + output);
}
/**
 * Issues "gfxd run" on the specified file to a locator in the specified
 * distributed system. Waits the given number of seconds for
 * the run command to complete.
 *
 * @param dsName the distributed system whose locator receives the command.
 * @param fn the SQL command file passed to "gfxd run".
 * @param waitSec seconds to wait for the run command to complete.
 * @throws HydraRuntimeException if no locator is found.
 */
public static synchronized void executeSQLCommands(
    String dsName, String fn, int waitSec) {
  log.info("Issuing run command to " + dsName + " using " + fn);
  List<Endpoint> locators = getEndpoints(dsName);
  if (locators.isEmpty()) {
    throw new HydraRuntimeException("No locators found");
  }
  Endpoint endpoint = locators.get(0);
  String cmd = getGFXDCommand() + "run -file=" + fn
      + " -locators=" + endpoint.getId();
  String output = ProcessMgr.fgexec(cmd, waitSec);
  // FIX: include the captured command output in the log, consistent with
  // shutDownAllFabricServers; previously "output" was computed and dropped.
  log.info("Issued run command to " + dsName + " using " + fn + ":\n" + output);
}
/**
 * Returns true if there is a fabric server in the RUNNING state in this JVM.
 * All other states return false.
 */
public static boolean isFabricServerRunning() {
  FabricService fs = FabricServiceManager.currentFabricServiceInstance();
  if (fs == null) {
    // no service instance exists, so nothing can be running
    return false;
  }
  return fs.status().equals(FabricService.State.RUNNING);
}
/**
 * Returns true if there is a fabric server in the STOPPED state in this JVM
 * or if no fabric server exists. All other states return false.
 */
public static boolean isFabricServerStopped() {
  FabricService fs = FabricServiceManager.currentFabricServiceInstance();
  if (fs == null) {
    // no service instance exists, which counts as stopped
    return true;
  }
  return fs.status().equals(FabricService.State.STOPPED);
}
/**
 * Returns the gfxd or gfxd.bat command. Simply add arguments and exec.
 *
 * @return the launcher command with a trailing space, ready for arguments.
 * @throws HydraRuntimeException if the launcher script cannot be found or
 *         the host OS is not supported.
 */
public static String getGFXDCommand() {
  HostDescription hd = TestConfig.getInstance()
      .getClientDescription(RemoteTestModule.getMyClientName())
      .getVmDescription().getHostDescription();
  char sep = hd.getFileSep();
  // NOTE(review): both candidate directories are currently the identical
  // "snappy/bin" path; the "hidden" bin was presumably meant to be a
  // different location -- confirm against the product layout.
  String productBin = hd.getGemFireHome() + sep + ".." + sep + "snappy"
      + sep + "bin";
  String productHiddenBin = hd.getGemFireHome() + sep + ".." + sep + "snappy"
      + sep + "bin";
  String gfxdScript;
  switch (hd.getOSType()) {
    case unix:
      gfxdScript = "snappy-shell";
      break;
    case windows:
      gfxdScript = "snappy-shell.bat";
      break;
    default:
      // FIX: fail fast with a clear message instead of later building a
      // path containing "null" when the OS is neither unix nor windows
      throw new HydraRuntimeException("Unsupported OS: " + hd.getOSType());
  }
  // prefer the visible bin directory, fall back to the hidden one
  String gfxd = productBin + sep + gfxdScript;
  if (!FileUtil.exists(gfxd)) {
    gfxd = productHiddenBin + sep + gfxdScript;
    if (!FileUtil.exists(gfxd)) {
      String s = gfxdScript + " not found in " + productBin
          + " or " + productHiddenBin;
      throw new HydraRuntimeException(s);
    }
  }
  String cmd;
  switch (hd.getOSType()) {
    case unix:
      cmd = "env GFXD_JAVA=" + hd.getJavaHome() + sep + "bin" + sep + "java "
          + gfxd;
      break;
    case windows:
      cmd = "cmd /c "
          + "set GFXD_JAVA=" + hd.getJavaHome() + sep + "bin" + sep + "java.exe "
          + "&& cmd /c " + gfxd;
      break;
    default:
      throw new HydraRuntimeException("Unsupported OS: " + hd.getOSType());
  }
  return cmd + " "; // make it easy on users by adding the trailing space here
}
//------------------------------------------------------------------------------
// Names and Ids
//------------------------------------------------------------------------------
/**
 * Returns the distributed system id for this JVM, taken from the
 * {@link FabricServerDescription} this VM is wired to.
 */
public static Integer getDistributedSystemId() {
  return getFabricServerDescription().getDistributedSystemId();
}
/**
 * Returns the distributed system id for the given distributed system name.
 *
 * @param distributedSystemName the logical distributed system name.
 * @throws HydraRuntimeException if no description matches the name.
 */
public static int getDistributedSystemId(String distributedSystemName) {
  // scan every configured fabric server description for a name match
  Map<String,FabricServerDescription> fsds =
      GfxdTestConfig.getInstance().getFabricServerDescriptions();
  for (FabricServerDescription fsd : fsds.values()) {
    if (fsd.getDistributedSystem().equals(distributedSystemName)) {
      return fsd.getDistributedSystemId();
    }
  }
  throw new HydraRuntimeException(
      "Distributed system not found: " + distributedSystemName);
}
/**
 * Returns the distributed system name for this JVM, taken from the
 * {@link FabricServerDescription} this VM is wired to.
 */
public static String getDistributedSystemName() {
  return getFabricServerDescription().getDistributedSystem();
}
/**
 * Returns the distributed system name for the given distributed system id.
 *
 * @param distributedSystemId the numeric distributed system id.
 * @throws HydraRuntimeException if no description matches the id.
 */
public static String getDistributedSystemName(int distributedSystemId) {
  // scan every configured fabric server description for an id match
  Map<String,FabricServerDescription> fsds =
      GfxdTestConfig.getInstance().getFabricServerDescriptions();
  for (FabricServerDescription fsd : fsds.values()) {
    if (fsd.getDistributedSystemId() == distributedSystemId) {
      return fsd.getDistributedSystem();
    }
  }
  throw new HydraRuntimeException(
      "Distributed system not found: " + distributedSystemId);
}
//------------------------------------------------------------------------------
// Properties
//------------------------------------------------------------------------------
/**
 * Returns the boot properties from the {@link FabricServerDescription}
 * to which this VM is wired using {@link FabricServerPrms#clientNames}.
 */
public static Properties getBootProperties() {
  // delegate to the description-taking overload with this VM's wired config
  return getBootProperties(getFabricServerDescription());
}
/**
 * Returns the boot properties using the given description.
 *
 * @param fsd the fabric server description to read properties from.
 */
public static synchronized Properties getBootProperties(
    FabricServerDescription fsd) {
  log.info("Looking up boot properties");
  Properties bootProps = fsd.getBootProperties();
  log.info("Looked up boot properties: " + prettyprint(bootProps));
  return bootProps;
}
/**
 * Returns the shutdown properties from the {@link FabricServerDescription}
 * to which this VM is wired using {@link FabricServerPrms#clientNames}.
 */
public static Properties getShutdownProperties() {
  // delegate to the description-taking overload with this VM's wired config
  return getShutdownProperties(getFabricServerDescription());
}
/**
 * Returns the shutdown properties using the given description.
 *
 * @param fsd the fabric server description to read properties from.
 */
private static synchronized Properties getShutdownProperties(
    FabricServerDescription fsd) {
  log.info("Looking up shutdown properties");
  Properties shutdownProps = fsd.getShutdownProperties();
  log.info("Looked up shutdown properties: " + prettyprint(shutdownProps));
  return shutdownProps;
}
/**
 * Returns a string containing indented properties, one per line,
 * sorted by key.
 *
 * @param p the properties to render; defaults are included via
 *        {@code propertyNames()}.
 */
private static String prettyprint(Properties p) {
  // FIX(idiom): replace raw List/SortedSet/Iterator and StringBuffer with
  // typed collections and StringBuilder; output is unchanged.
  SortedSet<String> keys = new TreeSet<String>();
  for (Enumeration<?> e = p.propertyNames(); e.hasMoreElements();) {
    keys.add((String)e.nextElement());
  }
  StringBuilder buf = new StringBuilder();
  for (String key : keys) {
    String val = p.getProperty(key);
    buf.append("\n " + key + "=" + val);
  }
  return buf.toString();
}
//------------------------------------------------------------------------------
// FabricServerDescription
//------------------------------------------------------------------------------
/**
* Returns the {@link FabricServerDescription} to which this VM is wired
* using {@link FabricServerPrms#clientNames}. Caches the result.
*
* @throws HydraRuntimeException if no configuration is wired to this client,
*/
public static FabricServerDescription getFabricServerDescription() {
String clientName = RemoteTestModule.getMyClientName();
if (TheFabricServerDescription == null) {
log.info("Looking up fabric server config for " + clientName);
Map<String,FabricServerDescription> fsds =
GfxdTestConfig.getInstance().getFabricServerDescriptions();
for (FabricServerDescription fsd : fsds.values()) {
if (fsd.getClientNames().contains(clientName)) {
log.info("Looked up fabric server config for " + clientName + ":\n"
+ fsd);
TheFabricServerDescription = fsd; // cache it
break;
}
}
if (TheFabricServerDescription == null) {
String s = clientName + " is not wired to any fabric server description"
+ " using " + BasePrms.nameForKey(FabricServerPrms.clientNames)
+ ". Either add it or use an alternate method that takes a "
+ BasePrms.nameForKey(FabricServerPrms.names)
+ " argument.";
throw new HydraRuntimeException(s);
}
}
return TheFabricServerDescription;
}
//------------------------------------------------------------------------------
// Locator
//------------------------------------------------------------------------------
/**
 * Creates a locator endpoint using the {@link FabricServerDescription}
 * to which this VM is wired using {@link FabricServerPrms#clientNames}.
 * The endpoint need only be created once.
 * <p>
 * During creation, selects a random port and registers the locator {@link
 * Endpoint} in the {@link LocatorBlackboard} map. The locator will always
 * use this port during startup.
 * <p>
 * All locators associated with a given distributed system must be created
 * before locators are started, so that the locator list is complete.
 * At least one locator must be started before non-locators can connect.
 *
 * @throws HydraRuntimeException if the VM already has a locator that did
 * not create its endpoint using this method.
 */
public static synchronized void createLocator() {
  Endpoint endpoint = findEndpoint();
  Locator locator = DistributedSystemHelper.getLocator();
  if (locator == null) {
    if (endpoint == null) {
      // first call in this VM: pick a port and register the endpoint
      generateEndpoint(getFabricServerDescription());
    } // else already created but not running
  } else if (endpoint == null) {
    // a locator is running but its endpoint is not on the blackboard
    String s = "Locator was already started without FabricServerHelper"
        + " using an unknown port";
    throw new HydraRuntimeException(s);
  } // else already created and running
  // TODO(review): SHOULD ALSO CHECK RUNNING FABRIC SERVER HERE
}
/**
 * Starts a peer locator using the previously created endpoint and the boot
 * properties from its {@link FabricServerDescription}, if it is not already
 * started.
 * <p>
 * This method is synchronized across hydra client VMs to ensure that one
 * locator in each distributed system starts up before the others, to work
 * around Bug 30341.
 * <p>
 * {@link hydra.DistributedSystemHelper#getLocator} can be used to access
 * the underlying GFE locator, if needed.
 *
 * @throws HydraRuntimeException if a non-GemFireXD locator is already
 * running in this VM.
 */
public static void startLocator() {
  // no network server config, no system users
  _startLocator(null, null);
}
/**
 * Starts a server locator with the given network server configuration using
 * the previously created endpoint and the boot properties from its {@link
 * FabricServerDescription}, if it is not already started.
 * <p>
 * This method is synchronized across hydra client VMs to ensure that one
 * locator in each distributed system starts up before the others, to work
 * around Bug 30341.
 * <p>
 * {@link hydra.DistributedSystemHelper#getLocator} can be used to access
 * the underlying GFE locator, if needed.
 *
 * @param networkServerConfig the network server configuration to use.
 * @throws HydraRuntimeException if a non-GemFireXD locator is already
 * running in this VM.
 */
public static void startLocator(String networkServerConfig) {
  _startLocator(networkServerConfig, null);
}
/**
 * Starts a server locator with the given network server configuration using
 * the previously created endpoint and the given system user/password map,
 * if it is not already started.
 * <p>
 * This method is synchronized across hydra client VMs to ensure that one
 * locator in each distributed system starts up before the others, to work
 * around Bug 30341.
 * <p>
 * {@link hydra.DistributedSystemHelper#getLocator} can be used to access
 * the underlying GFE locator, if needed.
 *
 * @param networkServerConfig the network server configuration to use.
 * @param systemUserPasswords map of system user name to password, added to
 *        the boot properties.
 * @throws HydraRuntimeException if a non-GemFireXD locator is already
 * running in this VM.
 */
public static void startLocator(String networkServerConfig,
    Map<String, String> systemUserPasswords) {
  _startLocator(networkServerConfig, systemUserPasswords);
}
/**
 * Starts the gemfirexd locator for this VM using its previously created
 * endpoint, guarding startup with the blackboard locator lock.
 *
 * @param networkServerConfig optional network server configuration; when
 *        non-null, network locators are started as well.
 * @param systemUserPasswords optional map of system user to password added
 *        to the boot properties.
 * @throws HydraRuntimeException if the locator was not created, was started
 *         outside this helper, or a clashing configuration is in use.
 */
private static synchronized void _startLocator(String networkServerConfig,
    Map<String, String> systemUserPasswords) {
  Endpoint endpoint = findEndpoint();
  if (endpoint == null) {
    String s = "Locator has not been created yet";
    throw new HydraRuntimeException(s);
  }
  FabricServerDescription fsd = getFabricServerDescription();
  Properties bootProps = fsd.getBootProperties();
  Locator locator = DistributedSystemHelper.getLocator();
  if (systemUserPasswords != null) {
    // add each system user credential to the boot properties
    for (Map.Entry<String, String> entry : systemUserPasswords.entrySet()) {
      String systemUser = entry.getKey();
      String password = entry.getValue();
      bootProps.put(Property.USER_PROPERTY_PREFIX + systemUser, password);
    }
  }
  bootProps.put(DistributionConfig.DISTRIBUTED_SYSTEM_ID_NAME,
      fsd.getDistributedSystemId().toString());
  bootProps.put(DistributionConfig.REMOTE_LOCATORS_NAME,
      fsd.getRemoteLocators());
  if (locator == null) {
    DistributedSystem ds = DistributedSystemHelper.getDistributedSystem();
    if (ds == null) {
      // serialize locator startup within this distributed system (Bug 30341)
      SharedCounters counters = LocatorBlackboard.getInstance(
          fsd.getDistributedSystem()).getSharedCounters();
      while (counters.incrementAndRead(LocatorBlackboard.locatorLock) != 1) {
        MasterController.sleepForMs(500);
      }
      // NOTE(review): if start() throws, the locatorLock counter is never
      // zeroed, which blocks other VMs; acceptable only if the test aborts.
      log.info("Starting gemfirexd locator");
      try {
        log.info("Starting gemfirexd locator \"" + endpoint
            + "\" using boot properties: " + prettyprint(bootProps));
        FabricServiceManager.getFabricLocatorInstance().start(
            endpoint.getAddress(),
            endpoint.getPort(),
            bootProps);
        if (networkServerConfig != null) {
          NetworkServerHelper.startNetworkLocators(networkServerConfig);
        }
      } catch (SQLException e) {
        String s = "Problem starting gemfirexd locator";
        throw new HydraRuntimeException(s, e);
      }
      // FIX: the local "locator" variable is always null in this branch;
      // re-fetch so the log reports the locator that actually started.
      log.info("Started locator: " + DistributedSystemHelper.getLocator());
      TheFabricServerProperties = bootProps; // cache them
      counters.zero(LocatorBlackboard.locatorLock); // let others proceed
    } else {
      String s = "This VM is already connected to a distributed system. "
          + "Too late to start a locator";
      throw new HydraRuntimeException(s);
    }
  } else if (TheFabricServerProperties == null) {
    // block attempt to start locator in multiple ways
    String s = "Locator was already started without FabricServerHelper"
        + " using an unknown, and possibly different, configuration";
    throw new HydraRuntimeException(s);
  } else if (!TheFabricServerProperties.equals(bootProps)) {
    // block attempt to connect to system with clashing configuration
    String s = "Already booted using properties "
        + TheFabricServerProperties + ", cannot also use " + bootProps;
    throw new HydraRuntimeException(s);
  } else {
    // make sure this is a running gemfirexd locator
    FabricLocator loc = FabricServiceManager.getFabricLocatorInstance();
    if (loc.status() != FabricService.State.RUNNING) {
      String s = "This VM already contains a non-GemFireXD locator";
      throw new HydraRuntimeException(s);
    }
  } // else it was already started with this configuration, which is fine
}
/**
 * Stops the currently running locator, if it exists. Delegates to
 * {@link #stopFabricServer()} since the locator runs as a fabric service.
 */
public static synchronized void stopLocator() {
  stopFabricServer();
}
//------------------------------------------------------------------------------
// Locator Endpoints
//------------------------------------------------------------------------------
/**
 * Returns all peer locator endpoints from the {@link LocatorBlackboard}
 * map, a possibly empty list. This includes all locators that have
 * ever started, regardless of their distributed system or current active
 * status.
 */
public static synchronized List<Endpoint> getEndpoints() {
  // FIX(idiom): replace raw ArrayList/Collection with typed collections;
  // the shared map is untyped, but every value stored in it is an Endpoint
  // (see generateEndpoint).
  List<Endpoint> endpoints = new ArrayList<Endpoint>();
  for (Object value : LocatorBlackboard.getInstance().getSharedMap()
                                       .getMap().values()) {
    endpoints.add((Endpoint)value);
  }
  return endpoints;
}
/**
 * Returns all peer locator endpoints for the distributed system from the
 * {@link FabricServerDescription} to which this VM is wired using {@link
 * FabricServerPrms#clientNames}.
 * <p>
 * The endpoints are looked up in the {@link LocatorBlackboard} map, a
 * possibly empty list. This includes all locators that have ever
 * started for this system, regardless of their current active status.
 */
public static synchronized List<Endpoint> getSystemEndpoints() {
  // delegate to the name-taking overload with this VM's wired system name
  return getEndpoints(getFabricServerDescription().getDistributedSystem());
}
/**
 * Returns all peer locator endpoints for the specified distributed system
 * from the {@link LocatorBlackboard} map, a possibly empty list. This
 * includes all locators that have ever started for the system,
 * regardless of their current active status.
 *
 * @param distributedSystemName the distributed system to filter by.
 */
public static synchronized List<Endpoint> getEndpoints(String distributedSystemName) {
  // FIX(idiom): replace raw ArrayList and raw Iterator with typed
  // collections and an enhanced for loop; behavior is unchanged.
  List<Endpoint> endpoints = new ArrayList<Endpoint>();
  for (Endpoint endpoint : getEndpoints()) {
    if (endpoint.getDistributedSystemName().equals(distributedSystemName)) {
      endpoints.add(endpoint);
    }
  }
  return endpoints;
}
/**
 * Returns all peer locator endpoints for the specified distributed systems
 * from the {@link LocatorBlackboard} map, a possibly empty list. This
 * includes all locators that have ever started for the systems,
 * regardless of their current active status.
 *
 * @param distributedSystemNames the distributed systems to collect for.
 */
public static synchronized List<Endpoint> getEndpoints(List<String> distributedSystemNames) {
  // FIX(idiom): raw "new ArrayList()" replaced with the typed form
  List<Endpoint> endpoints = new ArrayList<Endpoint>();
  for (String distributedSystemName : distributedSystemNames) {
    endpoints.addAll(getEndpoints(distributedSystemName));
  }
  return endpoints;
}
/**
 * Finds the peer locator endpoint for this VM in the shared {@link
 * LocatorBlackboard} map, if it exists. Caches the result.
 *
 * @return this VM's endpoint, or null if none has been generated yet.
 */
private static synchronized Endpoint findEndpoint() {
  if (TheLocatorEndpoint == null) {
    // keyed by this VM's logical id (see generateEndpoint)
    Integer vmid = RemoteTestModule.getMyVmid();
    TheLocatorEndpoint = (Endpoint)LocatorBlackboard.getInstance()
        .getSharedMap().get(vmid);
  }
  return TheLocatorEndpoint;
}
/**
 * Generates a peer locator endpoint with a random port for the given fabric
 * server description and stores it in the shared {@link LocatorBlackboard}
 * map. Caches the result.
 *
 * @param fsd the description supplying the distributed system name.
 * @return the (possibly pre-existing) endpoint for this VM.
 */
private static synchronized Endpoint generateEndpoint(
    FabricServerDescription fsd) {
  Endpoint endpoint = findEndpoint();
  if (endpoint == null) {
    log.info("Generating peer locator endpoint");
    Integer vmid = RemoteTestModule.getMyVmid();
    String name = RemoteTestModule.getMyClientName();
    String host = HostHelper.getCanonicalHostName();
    String addr = HostHelper.getHostAddress();
    int port = PortHelper.getRandomPort();
    // locator id takes the conventional "address[port]" form
    String id = addr + "[" + port + "]";
    String ds = fsd.getDistributedSystem();
    endpoint = new Endpoint(id, name, vmid.intValue(), host, addr, port, ds);
    log.info("Generated peer locator endpoint: " + endpoint);
    // publish so other VMs (and restarts) can find this locator
    LocatorBlackboard.getInstance().getSharedMap().put(vmid, endpoint);
  }
  TheLocatorEndpoint = endpoint; // cache it
  return endpoint;
}
/**
 * Represents the endpoint for a peer locator. Immutable after construction
 * and serializable so it can be stored in the shared blackboard map.
 */
public static class Endpoint implements Serializable {
  // set once in the constructor and never modified afterwards
  String id, name, host, addr, ds;
  int vmid, port;
  /**
   * Creates an endpoint for a locator.
   *
   * @param id the locator id.
   * @param name the logical hydra client VM name from {@link
   *             ClientPrms#names} found via the {@link
   *             ClientPrms#CLIENT_NAME_PROPERTY} system property.
   * @param vmid the logical hydra client VM ID found via {@link
   *             RemoteTestModule#getMyVmid}.
   * @param host the locator host.
   * @param addr the locator address.
   * @param port the locator port.
   * @param ds the locator distributed system name.
   * @throws IllegalArgumentException if any reference argument is null.
   */
  public Endpoint(String id, String name, int vmid,
                  String host, String addr, int port, String ds) {
    if (id == null) {
      throw new IllegalArgumentException("id cannot be null");
    }
    if (name == null) {
      throw new IllegalArgumentException("name cannot be null");
    }
    if (host == null) {
      throw new IllegalArgumentException("host cannot be null");
    }
    if (addr == null) {
      throw new IllegalArgumentException("addr cannot be null");
    }
    if (ds == null) {
      throw new IllegalArgumentException("ds cannot be null");
    }
    this.id = id;
    this.name = name;
    this.vmid = vmid;
    this.host = host;
    this.addr = addr;
    this.port = port;
    this.ds = ds;
  }
  /**
   * Returns the unique locator logical endpoint ID.
   */
  public String getId() {
    return this.id;
  }
  /**
   * Returns the locator logical VM name.
   */
  public String getName() {
    return this.name;
  }
  /**
   * Returns the locator logical VM ID.
   */
  public int getVmid() {
    return this.vmid;
  }
  /**
   * Returns the locator host.
   */
  public String getHost() {
    return this.host;
  }
  /**
   * Returns the locator address.
   */
  public String getAddress() {
    return this.addr;
  }
  /**
   * Returns the locator port.
   */
  public int getPort() {
    return this.port;
  }
  /**
   * Returns the locator distributed system name.
   */
  public String getDistributedSystemName() {
    return this.ds;
  }
  // Equality compares every field; consistent with hashCode below since
  // equal endpoints necessarily share the same port.
  public boolean equals(Object obj) {
    if (obj instanceof Endpoint) {
      Endpoint endpoint = (Endpoint)obj;
      return endpoint.getId().equals(this.getId())
          && endpoint.getName().equals(this.getName())
          && endpoint.getVmid() == this.getVmid()
          && endpoint.getHost().equals(this.getHost())
          && endpoint.getAddress().equals(this.getAddress())
          && endpoint.getPort() == this.getPort()
          && endpoint.getDistributedSystemName()
                     .equals(this.getDistributedSystemName());
    }
    return false;
  }
  // Hash on the port only: weak but legal, since equal endpoints have
  // equal ports (ports are randomly assigned per locator).
  public int hashCode() {
    return this.port;
  }
  /**
   * Returns the endpoint as a string, e.g. "addr[port](ds:vm_N_name_host)".
   */
  public String toString() {
    return this.id + "(" + this.ds + ":vm_" + this.vmid + "_" + this.name
        + "_" + this.host + ")";
  }
}
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.android;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.FileProvider;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.TransitiveInfoCollection;
import com.google.devtools.build.lib.analysis.actions.FileWriteAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.config.CompilationMode;
import com.google.devtools.build.lib.rules.android.AndroidResourcesProvider.ResourceContainer;
import com.google.devtools.build.lib.rules.android.AndroidResourcesProvider.ResourceType;
import com.google.devtools.build.lib.rules.android.LocalResourceContainer.Builder.InvalidAssetPath;
import com.google.devtools.build.lib.rules.android.LocalResourceContainer.Builder.InvalidResourcePath;
import com.google.devtools.build.lib.syntax.Type;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.List;
/** Represents a AndroidManifest, that may have been merged from dependencies. */
public final class ApplicationManifest {
/**
 * Builds an {@code ApplicationManifest} from the manifest of the rule's
 * single direct Android resource container.
 *
 * @throws RuleConfigurationException if the rule has no Android resources.
 */
public static ApplicationManifest fromResourcesRule(RuleContext ruleContext) {
  AndroidResourcesProvider resources = AndroidCommon.getAndroidResources(ruleContext);
  if (resources == null) {
    ruleContext.attributeError("manifest",
        "a resources or manifest attribute is mandatory.");
    throw new RuleConfigurationException();
  }
  ResourceContainer container =
      Iterables.getOnlyElement(resources.getDirectAndroidResources());
  return new ApplicationManifest(container.getManifest());
}
/**
 * Registers an action that derives a per-split manifest from this manifest
 * and returns an {@code ApplicationManifest} wrapping the result.
 *
 * @param ruleContext the rule being analyzed.
 * @param splitName the split's name, used in the output path and as an arg.
 * @param hasCode whether the split contains code (passed to the tool).
 */
public ApplicationManifest createSplitManifest(
    RuleContext ruleContext, String splitName, boolean hasCode) {
  // aapt insists that manifests be called AndroidManifest.xml, even though they have to be
  // explicitly designated as manifests on the command line
  Artifact result = AndroidBinary.getDxArtifact(
      ruleContext, "split_" + splitName + "/AndroidManifest.xml");
  SpawnAction.Builder builder = new SpawnAction.Builder()
      .setExecutable(ruleContext.getExecutablePrerequisite("$build_split_manifest", Mode.HOST))
      .setProgressMessage("Creating manifest for split " + splitName)
      .setMnemonic("AndroidBuildSplitManifest")
      .addArgument("--main_manifest")
      .addInputArgument(manifest)
      .addArgument("--split_manifest")
      .addOutputArgument(result)
      .addArgument("--split")
      .addArgument(splitName)
      .addArgument(hasCode ? "--hascode" : "--nohascode");
  // forward any application id override so the split keeps the same package
  String overridePackage = getOverridePackage(ruleContext);
  if (overridePackage != null) {
    builder
        .addArgument("--override_package")
        .addArgument(overridePackage);
  }
  ruleContext.registerAction(builder.build(ruleContext));
  return new ApplicationManifest(result);
}
/**
 * Returns the package name that should override the manifest's package, or
 * null when no override applies. An explicit {@code application_id}
 * attribute wins; otherwise the resource container's rename package (which
 * may itself be null) is used.
 */
private String getOverridePackage(RuleContext ruleContext) {
  // It seems that we sometimes rename the app for God-knows-what reason. If that is the case,
  // pass this information to the stubifier script.
  if (ruleContext.attributes().isAttributeValueExplicitlySpecified("application_id")) {
    return ruleContext.attributes().get("application_id", Type.STRING);
  }
  AndroidResourcesProvider resourcesProvider = AndroidCommon.getAndroidResources(ruleContext);
  if (resourcesProvider == null) {
    return null;
  }
  return Iterables.getOnlyElement(resourcesProvider.getDirectAndroidResources())
      .getRenameManifestPackage();
}
/**
 * Creates an action that injects a stub application into this manifest (for incremental
 * deployment), producing a stubified manifest plus a data file as outputs.
 *
 * @param ruleContext rule context used to obtain outputs and register the action
 * @return an ApplicationManifest wrapping the stubified manifest
 * @throws InterruptedException if interrupted while setting up the action
 */
public ApplicationManifest addStubApplication(RuleContext ruleContext)
    throws InterruptedException {
  // NOTE(review): the constant is spelled STUB_APPLICATON_MANIFEST (missing an 'I') —
  // presumably matching the declaration in AndroidRuleClasses; verify there before renaming.
  Artifact stubManifest =
      ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.STUB_APPLICATON_MANIFEST);
  SpawnAction.Builder builder = new SpawnAction.Builder()
      .setExecutable(ruleContext.getExecutablePrerequisite("$stubify_manifest", Mode.HOST))
      .setProgressMessage("Injecting stub application")
      .setMnemonic("InjectStubApplication")
      .addArgument("--input_manifest")
      .addInputArgument(manifest)
      .addArgument("--output_manifest")
      .addOutputArgument(stubManifest)
      .addArgument("--output_datafile")
      .addOutputArgument(
          ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.STUB_APPLICATION_DATA));
  // Forward an overridden application package, if any, to the stubifier script.
  String overridePackage = getOverridePackage(ruleContext);
  if (overridePackage != null) {
    builder.addArgument("--override_package");
    builder.addArgument(overridePackage);
  }
  ruleContext.registerAction(builder.build(ruleContext));
  return new ApplicationManifest(stubManifest);
}
/** Creates an ApplicationManifest from the rule's "manifest" attribute prerequisite. */
public static ApplicationManifest fromRule(RuleContext ruleContext) {
  return new ApplicationManifest(ruleContext.getPrerequisiteArtifact("manifest", Mode.TARGET));
}
/** Creates an ApplicationManifest wrapping the given manifest artifact directly. */
public static ApplicationManifest fromExplicitManifest(Artifact manifest) {
  return new ApplicationManifest(manifest);
}
/**
 * Generates an empty manifest for a rule that does not directly specify resources.
 *
 * <p><strong>Note:</strong> This generated manifest can then be used as the primary manifest
 * when merging with dependencies.
 *
 * @param ruleContext rule context used to derive the package name, create the output
 *     artifact and register the file-write action
 * @return the generated ApplicationManifest
 */
public static ApplicationManifest generatedManifest(RuleContext ruleContext) {
  Artifact generatedManifest = ruleContext.getUniqueDirectoryArtifact(
      ruleContext.getRule().getName() + "_generated", new PathFragment("AndroidManifest.xml"),
      ruleContext.getBinOrGenfilesDirectory());
  // The manifest package defaults to the rule's Java package.
  String manifestPackage = AndroidCommon.getJavaPackage(ruleContext);
  String contents = Joiner.on("\n").join(
      "<?xml version=\"1.0\" encoding=\"utf-8\"?>",
      "<manifest xmlns:android=\"http://schemas.android.com/apk/res/android\"",
      " package=\"" + manifestPackage + "\">",
      " <application>",
      " </application>",
      "</manifest>");
  ruleContext.getAnalysisEnvironment().registerAction(new FileWriteAction(
      ruleContext.getActionOwner(), generatedManifest, contents, false /* makeExecutable */));
  return new ApplicationManifest(generatedManifest);
}
/** The manifest artifact this instance wraps. */
private final Artifact manifest;

// Private: instances are obtained via the static factories and the pack/merge methods.
private ApplicationManifest(Artifact manifest) {
  this.manifest = manifest;
}
/**
 * Merges this manifest with the exported manifests of the given resource dependencies.
 * When no dependency exports a manifest, this instance is returned unchanged; otherwise a
 * merge action is registered and a new ApplicationManifest over its output is returned.
 */
public ApplicationManifest mergeWith(RuleContext ruleContext,
    ResourceDependencies resourceDeps) {
  Iterable<Artifact> mergeeManifests = getMergeeManifests(resourceDeps.getResources());
  if (Iterables.isEmpty(mergeeManifests)) {
    // Nothing to merge; keep using the current manifest.
    return this;
  }
  Artifact outputManifest = ruleContext.getUniqueDirectoryArtifact(
      ruleContext.getRule().getName() + "_merged", "AndroidManifest.xml",
      ruleContext.getBinOrGenfilesDirectory());
  AndroidManifestMergeHelper.createMergeManifestAction(ruleContext, getManifest(),
      mergeeManifests, ImmutableList.of("all"), outputManifest);
  return new ApplicationManifest(outputManifest);
}
/**
 * Collects the manifests of all resource containers that export theirs, ordered by
 * artifact exec path for deterministic action inputs.
 */
private static Iterable<Artifact> getMergeeManifests(
    Iterable<ResourceContainer> resourceContainers) {
  ImmutableSortedSet.Builder<Artifact> exported =
      ImmutableSortedSet.orderedBy(Artifact.EXEC_PATH_COMPARATOR);
  for (ResourceContainer container : resourceContainers) {
    if (!container.isManifestExported()) {
      continue;
    }
    exported.add(container.getManifest());
  }
  return exported.build();
}
/**
 * Packages up the manifest with assets from the rule and dependent resources.
 *
 * <p>Delegates to {@link #createApk} with no configuration filters, no uncompressed
 * extensions, no densities and no application-id/version overrides.
 *
 * @throws InterruptedException */
public ResourceApk packWithAssets(
    Artifact resourceApk,
    RuleContext ruleContext,
    ResourceDependencies resourceDeps,
    Artifact rTxt,
    boolean incremental,
    Artifact proguardCfg) throws InterruptedException {
  try {
    // Collect only the assets declared directly on this rule.
    LocalResourceContainer data = new LocalResourceContainer.Builder()
        .withAssets(
            AndroidCommon.getAssetDir(ruleContext),
            ruleContext.getPrerequisites(
                // TODO(bazel-team): Remove the ResourceType construct.
                ResourceType.ASSETS.getAttribute(),
                Mode.TARGET,
                FileProvider.class)).build();
    // NOTE(review): the parameter comments below were previously rotated by one position
    // relative to createApk's parameter list; they now match the actual positions.
    return createApk(resourceApk,
        ruleContext,
        resourceDeps,
        rTxt,
        null, /* Artifact symbolsTxt */
        ImmutableList.<String>of(), /* configurationFilters */
        ImmutableList.<String>of(), /* uncompressedExtensions */
        ImmutableList.<String>of(), /* densities */
        null, /* String applicationId */
        null, /* String versionCode */
        null, /* String versionName */
        incremental,
        data,
        proguardCfg);
  } catch (InvalidAssetPath e) {
    // A bad asset path is a user configuration error, reported on the assets attribute.
    ruleContext.attributeError(ResourceType.ASSETS.getAttribute(), e.getMessage());
    throw new RuleConfigurationException();
  }
}
/**
 * Packages up the manifest with resource and assets from the rule and dependent resources.
 *
 * <p>Collects both the rule's assets and its "resource_files", then delegates to
 * {@link #createApk} with the given filtering and versioning options.
 *
 * @throws InterruptedException */
public ResourceApk packWithDataAndResources(
    Artifact resourceApk,
    RuleContext ruleContext,
    ResourceDependencies resourceDeps,
    Artifact rTxt,
    Artifact symbolsTxt,
    List<String> configurationFilters,
    List<String> uncompressedExtensions,
    List<String> densities,
    String applicationId,
    String versionCode,
    String versionName,
    boolean incremental, Artifact proguardCfg) throws InterruptedException {
  try {
    // Local data: assets from the asset dir plus the rule's declared resource files.
    LocalResourceContainer data = new LocalResourceContainer.Builder()
        .withAssets(
            AndroidCommon.getAssetDir(ruleContext),
            ruleContext.getPrerequisites(
                // TODO(bazel-team): Remove the ResourceType construct.
                ResourceType.ASSETS.getAttribute(),
                Mode.TARGET,
                FileProvider.class))
        .withResources(
            ruleContext.getPrerequisites(
                "resource_files",
                Mode.TARGET,
                FileProvider.class)).build();
    return createApk(resourceApk,
        ruleContext,
        resourceDeps,
        rTxt,
        symbolsTxt,
        configurationFilters,
        uncompressedExtensions,
        densities,
        applicationId,
        versionCode,
        versionName,
        incremental,
        data,
        proguardCfg);
  } catch (InvalidAssetPath e) {
    // Report invalid paths as attribute errors so the user sees which attribute is at fault.
    ruleContext.attributeError(ResourceType.ASSETS.getAttribute(), e.getMessage());
    throw new RuleConfigurationException();
  } catch (InvalidResourcePath e) {
    ruleContext.attributeError("resource_files", e.getMessage());
    throw new RuleConfigurationException();
  }
}
/**
 * Builds the resource container for this rule, validates it against inlined constants, and
 * registers the resource-processing action that produces {@code resourceApk}.
 *
 * <p>In non-incremental mode the action additionally emits the R.txt, symbols file and
 * generated Java source jar; in incremental mode those outputs are skipped.
 *
 * @return a ResourceApk bundling the apk, source jar, dependencies and processed container
 * @throws InterruptedException if interrupted while creating outputs
 */
private ResourceApk createApk(Artifact resourceApk,
    RuleContext ruleContext,
    ResourceDependencies resourceDeps,
    Artifact rTxt,
    Artifact symbolsTxt,
    List<String> configurationFilters,
    List<String> uncompressedExtensions,
    List<String> densities,
    String applicationId,
    String versionCode,
    String versionName,
    boolean incremental,
    LocalResourceContainer data, Artifact proguardCfg) throws InterruptedException {
  ResourceContainer resourceContainer = checkForInlinedResources(
      new AndroidResourceContainerBuilder()
          .withData(data)
          .withManifest(getManifest())
          .withROutput(rTxt)
          .withSymbolsFile(symbolsTxt)
          .buildFromRule(ruleContext, resourceApk),
      resourceDeps.getResources(), // TODO(bazel-team): Figure out if we really need to check
      // the ENTIRE transitive closure, or just the direct dependencies. Given that each rule with
      // resources would check for inline resources, we can rely on the previous rule to have
      // checked its dependencies.
      ruleContext);
  AndroidResourcesProcessorBuilder builder =
      new AndroidResourcesProcessorBuilder(ruleContext)
          .setApkOut(resourceContainer.getApk())
          .setConfigurationFilters(configurationFilters)
          .setUncompressedExtensions(uncompressedExtensions)
          .setJavaPackage(resourceContainer.getJavaPackage())
          // Debug resources unless this is an optimizing (OPT) compilation.
          .setDebug(ruleContext.getConfiguration().getCompilationMode() != CompilationMode.OPT)
          .withPrimary(resourceContainer)
          .withDependencies(resourceDeps)
          .setDensities(densities)
          .setProguardOut(proguardCfg)
          .setApplicationId(applicationId)
          .setVersionCode(versionCode)
          .setVersionName(versionName);
  if (!incremental) {
    // Full builds also produce the R.txt, symbols and generated-source outputs.
    builder
        .setRTxtOut(resourceContainer.getRTxt())
        .setSymbolsTxt(resourceContainer.getSymbolsTxt())
        .setSourceJarOut(resourceContainer.getJavaSourceJar());
  }
  ResourceContainer processed = builder.build(ruleContext);
  return new ResourceApk(
      resourceApk, processed.getJavaSourceJar(), resourceDeps, processed, manifest,
      proguardCfg, false);
}
/**
 * Validates that a binary depending on a library project (more than one container in the
 * set) does not use inlined constants while also supplying local resources; reports a rule
 * error and throws RuleConfigurationException on violation. Returns the container unchanged
 * otherwise.
 */
private static ResourceContainer checkForInlinedResources(ResourceContainer resourceContainer,
    Iterable<ResourceContainer> resourceContainers, RuleContext ruleContext) {
  // Dealing with Android library projects
  boolean dependsOnLibraryProject = Iterables.size(resourceContainers) > 1;
  if (dependsOnLibraryProject
      && resourceContainer.getConstantsInlined()
      && !resourceContainer.getArtifacts(ResourceType.RESOURCES).isEmpty()) {
    ruleContext.ruleError("This android binary depends on an android "
        + "library project, so the resources '"
        + AndroidCommon.getAndroidResources(ruleContext).getLabel()
        + "' should have the attribute inline_constants set to 0");
    throw new RuleConfigurationException();
  }
  return resourceContainer;
}
/** Uses the resource apk from the resources attribute, as opposed to recompiling. */
public ResourceApk useCurrentResources(RuleContext ruleContext, Artifact proguardCfg) {
  // Expects exactly one direct resource container on this rule.
  ResourceContainer resourceContainer = Iterables.getOnlyElement(
      AndroidCommon.getAndroidResources(ruleContext).getDirectAndroidResources());
  // Only the proguard-spec generation action is registered; the existing apk is reused.
  new AndroidAaptActionHelper(
      ruleContext,
      resourceContainer.getManifest(),
      Lists.newArrayList(resourceContainer)).createGenerateProguardAction(proguardCfg);
  return new ResourceApk(
      resourceContainer.getApk(),
      null /* javaSrcJar */,
      ResourceDependencies.empty(),
      resourceContainer,
      manifest,
      proguardCfg,
      false);
}
/**
 * Packages up the manifest with resources, and generates the R.java.
 * @throws InterruptedException
 *
 * @deprecated in favor of {@link ApplicationManifest#packWithDataAndResources}.
 */
@Deprecated
public ResourceApk packWithResources(
    Artifact resourceApk,
    RuleContext ruleContext,
    ResourceDependencies resourceDeps,
    boolean createSource,
    Artifact proguardCfg) throws InterruptedException {
  TransitiveInfoCollection resourcesPrerequisite =
      ruleContext.getPrerequisite("resources", Mode.TARGET);
  ResourceContainer resourceContainer = Iterables.getOnlyElement(
      resourcesPrerequisite.getProvider(AndroidResourcesProvider.class)
          .getDirectAndroidResources());
  // It's ugly, but flattening now is more performant given the rest of the checks.
  // NOTE(review): the direct resourceContainer is deliberately NOT added to this list;
  // only the dependency resources are flattened here.
  List<ResourceContainer> resourceContainers =
      ImmutableList.<ResourceContainer>builder()
          .addAll(resourceDeps.getResources()).build();
  // Dealing with Android library projects
  if (Iterables.size(resourceDeps.getResources()) > 1) {
    if (resourceContainer.getConstantsInlined()
        && !resourceContainer.getArtifacts(ResourceType.RESOURCES).isEmpty()) {
      ruleContext.ruleError("This android_binary depends on an android_library, so the"
          + " resources '" + AndroidCommon.getAndroidResources(ruleContext).getLabel()
          + "' should have the attribute inline_constants set to 0");
      throw new RuleConfigurationException();
    }
  }
  // This binary depends on a library project, so we need to regenerate the
  // resources. The resulting sources and apk will combine all the resources
  // contained in the transitive closure of the binary.
  AndroidAaptActionHelper aaptActionHelper = new AndroidAaptActionHelper(ruleContext,
      getManifest(), Lists.newArrayList(resourceContainers));
  List<String> resourceConfigurationFilters =
      ruleContext.getTokenizedStringListAttr("resource_configuration_filters");
  List<String> uncompressedExtensions =
      ruleContext.getTokenizedStringListAttr("nocompress_extensions");
  // Translate the attributes into raw aapt flags: -0 per uncompressed extension,
  // -c with a comma-joined list of configuration filters.
  ImmutableList.Builder<String> additionalAaptOpts = ImmutableList.<String>builder();
  for (String extension : uncompressedExtensions) {
    additionalAaptOpts.add("-0").add(extension);
  }
  if (!resourceConfigurationFilters.isEmpty()) {
    additionalAaptOpts.add("-c").add(Joiner.on(",").join(resourceConfigurationFilters));
  }
  Artifact javaSourcesJar = null;
  if (createSource) {
    javaSourcesJar =
        ruleContext.getImplicitOutputArtifact(AndroidRuleClasses.ANDROID_JAVA_SOURCE_JAR);
    aaptActionHelper.createGenerateResourceSymbolsAction(
        javaSourcesJar, null, resourceContainer.getJavaPackage(), true);
  }
  List<String> densities = ruleContext.getTokenizedStringListAttr("densities");
  aaptActionHelper.createGenerateApkAction(resourceApk,
      resourceContainer.getRenameManifestPackage(), additionalAaptOpts.build(), densities);
  // Re-wrap the original container with the newly generated apk, manifest and source jar.
  ResourceContainer updatedResources = new ResourceContainer(
      ruleContext.getLabel(),
      resourceContainer.getJavaPackage(),
      resourceContainer.getRenameManifestPackage(),
      resourceContainer.getConstantsInlined(),
      resourceApk,
      getManifest(),
      javaSourcesJar,
      resourceContainer.getArtifacts(ResourceType.ASSETS),
      resourceContainer.getArtifacts(ResourceType.RESOURCES),
      resourceContainer.getRoots(ResourceType.ASSETS),
      resourceContainer.getRoots(ResourceType.RESOURCES),
      resourceContainer.isManifestExported(),
      resourceContainer.getRTxt(), null);
  aaptActionHelper.createGenerateProguardAction(proguardCfg);
  return new ResourceApk(resourceApk, updatedResources.getJavaSourceJar(),
      resourceDeps, updatedResources, manifest, proguardCfg, true);
}
/** Returns the manifest artifact wrapped by this instance. */
public Artifact getManifest() {
  return manifest;
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.sagemaker.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Shows the final value for the objective metric for a training job that was launched by a hyperparameter tuning job.
* You define the objective metric in the <code>HyperParameterTuningJobObjective</code> parameter of
* <a>HyperParameterTuningJobConfig</a>.
* </p>
*
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/sagemaker-2017-07-24/FinalHyperParameterTuningJobObjectiveMetric"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class FinalHyperParameterTuningJobObjectiveMetric implements Serializable, Cloneable, StructuredPojo {

    /** Whether to minimize or maximize the objective metric; valid values are Minimize and Maximize. */
    private String type;

    /** The name of the objective metric. */
    private String metricName;

    /** The value of the objective metric. */
    private Float value;

    /**
     * Sets whether to minimize or maximize the objective metric. Valid values are Minimize and Maximize.
     *
     * @param type
     *        Whether to minimize or maximize the objective metric. Valid values are Minimize and Maximize.
     * @see HyperParameterTuningJobObjectiveType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Returns whether to minimize or maximize the objective metric. Valid values are Minimize and Maximize.
     *
     * @return Whether to minimize or maximize the objective metric. Valid values are Minimize and Maximize.
     * @see HyperParameterTuningJobObjectiveType
     */
    public String getType() {
        return type;
    }

    /**
     * Fluently sets whether to minimize or maximize the objective metric.
     *
     * @param type
     *        Whether to minimize or maximize the objective metric. Valid values are Minimize and Maximize.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see HyperParameterTuningJobObjectiveType
     */
    public FinalHyperParameterTuningJobObjectiveMetric withType(String type) {
        this.type = type;
        return this;
    }

    /**
     * Fluently sets whether to minimize or maximize the objective metric from the enum form.
     *
     * @param type
     *        Whether to minimize or maximize the objective metric. Valid values are Minimize and Maximize.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see HyperParameterTuningJobObjectiveType
     */
    public FinalHyperParameterTuningJobObjectiveMetric withType(HyperParameterTuningJobObjectiveType type) {
        setType(type.toString());
        return this;
    }

    /**
     * Sets the name of the objective metric.
     *
     * @param metricName
     *        The name of the objective metric.
     */
    public void setMetricName(String metricName) {
        this.metricName = metricName;
    }

    /**
     * Returns the name of the objective metric.
     *
     * @return The name of the objective metric.
     */
    public String getMetricName() {
        return metricName;
    }

    /**
     * Fluently sets the name of the objective metric.
     *
     * @param metricName
     *        The name of the objective metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FinalHyperParameterTuningJobObjectiveMetric withMetricName(String metricName) {
        this.metricName = metricName;
        return this;
    }

    /**
     * Sets the value of the objective metric.
     *
     * @param value
     *        The value of the objective metric.
     */
    public void setValue(Float value) {
        this.value = value;
    }

    /**
     * Returns the value of the objective metric.
     *
     * @return The value of the objective metric.
     */
    public Float getValue() {
        return value;
    }

    /**
     * Fluently sets the value of the objective metric.
     *
     * @param value
     *        The value of the objective metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public FinalHyperParameterTuningJobObjectiveMetric withValue(Float value) {
        this.value = value;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getType() != null) {
            sb.append("Type: ").append(getType()).append(",");
        }
        if (getMetricName() != null) {
            sb.append("MetricName: ").append(getMetricName()).append(",");
        }
        if (getValue() != null) {
            sb.append("Value: ").append(getValue());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rejects null, matching the original null check.
        if (!(obj instanceof FinalHyperParameterTuningJobObjectiveMetric)) {
            return false;
        }
        FinalHyperParameterTuningJobObjectiveMetric other = (FinalHyperParameterTuningJobObjectiveMetric) obj;
        return java.util.Objects.equals(getType(), other.getType())
                && java.util.Objects.equals(getMetricName(), other.getMetricName())
                && java.util.Objects.equals(getValue(), other.getValue());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0) as the
        // previous hand-rolled loop, so the produced value is bit-identical.
        return java.util.Objects.hash(getType(), getMetricName(), getValue());
    }

    @Override
    public FinalHyperParameterTuningJobObjectiveMetric clone() {
        try {
            return (FinalHyperParameterTuningJobObjectiveMetric) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.sagemaker.model.transform.FinalHyperParameterTuningJobObjectiveMetricMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Copyright 2012 - 2015 Manuel Laggner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tinymediamanager.core.movie;
import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import org.jdesktop.observablecollections.ObservableCollections;
import org.tinymediamanager.core.AbstractModelObject;
import org.tinymediamanager.core.movie.connector.MovieConnectors;
import org.tinymediamanager.scraper.CountryCode;
import org.tinymediamanager.scraper.MediaArtwork.FanartSizes;
import org.tinymediamanager.scraper.MediaArtwork.PosterSizes;
import org.tinymediamanager.scraper.MediaLanguages;
/**
* The Class MovieSettings.
*/
@XmlRootElement(name = "MovieSettings")
public class MovieSettings extends AbstractModelObject {
// Property names shared by the XML binding annotations and property-change events.
// Modifier order normalized to the canonical `private static final` (JLS-recommended);
// names and values are unchanged.
private static final String PATH = "path";
private static final String FILENAME = "filename";
private static final String MOVIE_DATA_SOURCE = "movieDataSource";
private static final String IMAGE_POSTER_SIZE = "imagePosterSize";
private static final String IMAGE_FANART_SIZE = "imageFanartSize";
private static final String IMAGE_EXTRATHUMBS = "imageExtraThumbs";
private static final String IMAGE_EXTRATHUMBS_RESIZE = "imageExtraThumbsResize";
private static final String IMAGE_EXTRATHUMBS_SIZE = "imageExtraThumbsSize";
private static final String IMAGE_EXTRATHUMBS_COUNT = "imageExtraThumbsCount";
private static final String IMAGE_EXTRAFANART = "imageExtraFanart";
private static final String IMAGE_EXTRAFANART_COUNT = "imageExtraFanartCount";
private static final String ENABLE_MOVIESET_ARTWORK_MOVIE_FOLDER = "enableMovieSetArtworkMovieFolder";
private static final String ENABLE_MOVIESET_ARTWORK_FOLDER = "enableMovieSetArtworkFolder";
private static final String MOVIESET_ARTWORK_FOLDER = "movieSetArtworkFolder";
private static final String MOVIE_CONNECTOR = "movieConnector";
private static final String MOVIE_NFO_FILENAME = "movieNfoFilename";
private static final String MOVIE_POSTER_FILENAME = "moviePosterFilename";
private static final String MOVIE_FANART_FILENAME = "movieFanartFilename";
private static final String MOVIE_RENAMER_PATHNAME = "movieRenamerPathname";
private static final String MOVIE_RENAMER_FILENAME = "movieRenamerFilename";
private static final String MOVIE_RENAMER_SPACE_SUBSTITUTION = "movieRenamerSpaceSubstitution";
private static final String MOVIE_RENAMER_SPACE_REPLACEMENT = "movieRenamerSpaceReplacement";
private static final String MOVIE_RENAMER_NFO_CLEANUP = "movieRenamerNfoCleanup";
private static final String MOVIE_RENAMER_MOVIESET_SINGLE_MOVIE = "movieRenamerMoviesetSingleMovie";
private static final String MOVIE_SCRAPER = "movieScraper";
private static final String SCRAPE_BEST_IMAGE = "scrapeBestImage";
private static final String IMAGE_SCRAPER_TMDB = "imageScraperTmdb";
private static final String IMAGE_SCRAPER_FANART_TV = "imageScraperFanartTv";
private static final String TRAILER_SCRAPER_TMDB = "trailerScraperTmdb";
private static final String TRAILER_SCRAPER_HD_TRAILERS = "trailerScraperHdTrailers";
private static final String TRAILER_SCRAPER_OFDB = "trailerScraperOfdb";
private static final String WRITE_ACTOR_IMAGES = "writeActorImages";
private static final String IMDB_SCRAPE_FOREIGN_LANGU = "imdbScrapeForeignLanguage";
private static final String SCRAPER_LANGU = "scraperLanguage";
private static final String CERTIFICATION_COUNTRY = "certificationCountry";
private static final String SCRAPER_THRESHOLD = "scraperThreshold";
private static final String DETECT_MOVIE_MULTI_DIR = "detectMovieMultiDir";
private static final String BUILD_IMAGE_CACHE_ON_IMPORT = "buildImageCacheOnImport";
private static final String BAD_WORDS = "badWords";
private static final String ENTRY = "entry";
private static final String RUNTIME_FROM_MI = "runtimeFromMediaInfo";
private static final String ASCII_REPLACEMENT = "asciiReplacement";
private static final String YEAR_COLUMN_VISIBLE = "yearColumnVisible";
private static final String NFO_COLUMN_VISIBLE = "nfoColumnVisible";
private static final String IMAGE_COLUMN_VISIBLE = "imageColumnVisible";
private static final String TRAILER_COLUMN_VISIBLE = "trailerColumnVisible";
private static final String SUBTITLE_COLUMN_VISIBLE = "subtitleColumnVisible";
private static final String WATCHED_COLUMN_VISIBLE = "watchedColumnVisible";
private static final String SCRAPER_FALLBACK = "scraperFallback";
// XML-bound list fields. The data-source and bad-word lists are observable so UI
// bindings (see ObservableCollections import) can track changes.
@XmlElementWrapper(name = MOVIE_DATA_SOURCE)
@XmlElement(name = PATH)
private final List<String> movieDataSources = ObservableCollections.observableList(new ArrayList<String>());

@XmlElementWrapper(name = MOVIE_NFO_FILENAME)
@XmlElement(name = FILENAME)
private final List<MovieNfoNaming> movieNfoFilenames = new ArrayList<MovieNfoNaming>();

@XmlElementWrapper(name = MOVIE_POSTER_FILENAME)
@XmlElement(name = FILENAME)
private final List<MoviePosterNaming> moviePosterFilenames = new ArrayList<MoviePosterNaming>();

@XmlElementWrapper(name = MOVIE_FANART_FILENAME)
@XmlElement(name = FILENAME)
private final List<MovieFanartNaming> movieFanartFilenames = new ArrayList<MovieFanartNaming>();

@XmlElementWrapper(name = BAD_WORDS)
@XmlElement(name = ENTRY)
private final List<String> badWords = ObservableCollections.observableList(new ArrayList<String>());

// Scalar settings with their defaults; accessed via the getters/setters below.
private MovieConnectors movieConnector = MovieConnectors.XBMC;
private String movieRenamerPathname = "$T ($Y)";
private String movieRenamerFilename = "$T ($Y) $V $A";
private boolean movieRenamerSpaceSubstitution = false;
private String movieRenamerSpaceReplacement = "_";
private boolean movieRenamerNfoCleanup = false;
private boolean imdbScrapeForeignLanguage = false;
private MovieScrapers movieScraper = MovieScrapers.TMDB;
private PosterSizes imagePosterSize = PosterSizes.BIG;
private boolean imageScraperTmdb = true;
private boolean imageScraperFanartTv = true;
private FanartSizes imageFanartSize = FanartSizes.LARGE;
private boolean imageExtraThumbs = false;
private boolean imageExtraThumbsResize = true;
private int imageExtraThumbsSize = 300;
private int imageExtraThumbsCount = 5;
private boolean imageExtraFanart = false;
private int imageExtraFanartCount = 5;
private boolean enableMovieSetArtworkMovieFolder = true;
private boolean enableMovieSetArtworkFolder = false;
private String movieSetArtworkFolder = "MoviesetArtwork";
private boolean scrapeBestImage = true;
private boolean imageLanguagePriority = true;
private boolean imageLogo = false;
private boolean imageBanner = false;
private boolean imageClearart = false;
private boolean imageDiscart = false;
private boolean imageThumb = false;
private boolean trailerScraperTmdb = true;
private boolean trailerScraperHdTrailers = true;
private boolean trailerScraperOfdb = true;
private boolean writeActorImages = false;
private MediaLanguages scraperLanguage = MediaLanguages.en;
private CountryCode certificationCountry = CountryCode.US;
private double scraperThreshold = 0.75;
private boolean detectMovieMultiDir = false;
private boolean buildImageCacheOnImport = false;
private boolean movieRenamerCreateMoviesetForSingleMovie = false;
private boolean runtimeFromMediaInfo = false;
private boolean asciiReplacement = false;
private boolean yearColumnVisible = true;
private boolean ratingColumnVisible = true;
private boolean nfoColumnVisible = true;
private boolean imageColumnVisible = true;
private boolean trailerColumnVisible = true;
private boolean subtitleColumnVisible = true;
private boolean watchedColumnVisible = true;
private boolean scraperFallback = false;
private boolean useTrailerPreference = false;
private MovieTrailerQuality trailerQuality = MovieTrailerQuality.HD_720;
private MovieTrailerSources trailerSource = MovieTrailerSources.YOUTUBE;
private boolean syncTrakt = false;

// No-arg constructor required for JAXB unmarshalling.
public MovieSettings() {
}
/** Adds a datasource path (no duplicates) and fires a property-change event. */
public void addMovieDataSources(String path) {
  if (movieDataSources.contains(path)) {
    return;
  }
  movieDataSources.add(path);
  firePropertyChange(MOVIE_DATA_SOURCE, null, movieDataSources);
}

/** Removes a datasource path, detaching it from the movie list first. */
public void removeMovieDataSources(String path) {
  MovieList.getInstance().removeDatasource(path);
  movieDataSources.remove(path);
  firePropertyChange(MOVIE_DATA_SOURCE, null, movieDataSources);
}

/** Returns the (live, observable) list of datasource paths. */
public List<String> getMovieDataSource() {
  return movieDataSources;
}
/** Adds an NFO naming scheme (no duplicates) and fires a property-change event. */
public void addMovieNfoFilename(MovieNfoNaming filename) {
  if (movieNfoFilenames.contains(filename)) {
    return;
  }
  movieNfoFilenames.add(filename);
  firePropertyChange(MOVIE_NFO_FILENAME, null, movieNfoFilenames);
}

/** Removes an NFO naming scheme; fires a property-change event only if it was present. */
public void removeMovieNfoFilename(MovieNfoNaming filename) {
  if (movieNfoFilenames.remove(filename)) {
    firePropertyChange(MOVIE_NFO_FILENAME, null, movieNfoFilenames);
  }
}

/** Removes all NFO naming schemes and fires a property-change event. */
public void clearMovieNfoFilenames() {
  movieNfoFilenames.clear();
  firePropertyChange(MOVIE_NFO_FILENAME, null, movieNfoFilenames);
}

/** Returns a defensive copy of the configured NFO naming schemes. */
public List<MovieNfoNaming> getMovieNfoFilenames() {
  return new ArrayList<MovieNfoNaming>(movieNfoFilenames);
}
/** Adds a poster naming scheme (no duplicates) and fires a property-change event. */
public void addMoviePosterFilename(MoviePosterNaming filename) {
  if (moviePosterFilenames.contains(filename)) {
    return;
  }
  moviePosterFilenames.add(filename);
  firePropertyChange(MOVIE_POSTER_FILENAME, null, moviePosterFilenames);
}

/** Removes a poster naming scheme; fires a property-change event only if it was present. */
public void removeMoviePosterFilename(MoviePosterNaming filename) {
  if (moviePosterFilenames.remove(filename)) {
    firePropertyChange(MOVIE_POSTER_FILENAME, null, moviePosterFilenames);
  }
}

/** Removes all poster naming schemes and fires a property-change event. */
public void clearMoviePosterFilenames() {
  moviePosterFilenames.clear();
  firePropertyChange(MOVIE_POSTER_FILENAME, null, moviePosterFilenames);
}

/** Returns a defensive copy of the configured poster naming schemes. */
public List<MoviePosterNaming> getMoviePosterFilenames() {
  return new ArrayList<MoviePosterNaming>(moviePosterFilenames);
}
/** Adds a fanart naming scheme (no duplicates) and fires a property-change event. */
public void addMovieFanartFilename(MovieFanartNaming filename) {
  if (movieFanartFilenames.contains(filename)) {
    return;
  }
  movieFanartFilenames.add(filename);
  firePropertyChange(MOVIE_FANART_FILENAME, null, movieFanartFilenames);
}

/** Removes a fanart naming scheme; fires a property-change event only if it was present. */
public void removeMovieFanartFilename(MovieFanartNaming filename) {
  if (movieFanartFilenames.remove(filename)) {
    firePropertyChange(MOVIE_FANART_FILENAME, null, movieFanartFilenames);
  }
}

/** Removes all fanart naming schemes and fires a property-change event. */
public void clearMovieFanartFilenames() {
  movieFanartFilenames.clear();
  firePropertyChange(MOVIE_FANART_FILENAME, null, movieFanartFilenames);
}

/** Returns a defensive copy of the configured fanart naming schemes. */
public List<MovieFanartNaming> getMovieFanartFilenames() {
  return new ArrayList<MovieFanartNaming>(movieFanartFilenames);
}
// Poster/fanart size preferences, XML-bound via the getter annotations.
@XmlElement(name = IMAGE_POSTER_SIZE)
public PosterSizes getImagePosterSize() {
  return imagePosterSize;
}

public void setImagePosterSize(PosterSizes newValue) {
  PosterSizes oldValue = this.imagePosterSize;
  this.imagePosterSize = newValue;
  firePropertyChange(IMAGE_POSTER_SIZE, oldValue, newValue);
}

@XmlElement(name = IMAGE_FANART_SIZE)
public FanartSizes getImageFanartSize() {
  return imageFanartSize;
}

public void setImageFanartSize(FanartSizes newValue) {
  FanartSizes oldValue = this.imageFanartSize;
  this.imageFanartSize = newValue;
  firePropertyChange(IMAGE_FANART_SIZE, oldValue, newValue);
}
// --- extrathumbs / extrafanart settings: plain bound-bean accessors ---
/** @return true if extrathumbs should be processed */
public boolean isImageExtraThumbs() {
return imageExtraThumbs;
}
/** @return true if extrathumbs should be resized to {@link #getImageExtraThumbsSize()} */
public boolean isImageExtraThumbsResize() {
return imageExtraThumbsResize;
}
/** @return the configured extrathumbs resize target (unit not visible here - presumably pixels) */
public int getImageExtraThumbsSize() {
return imageExtraThumbsSize;
}
public void setImageExtraThumbsResize(boolean newValue) {
boolean oldValue = this.imageExtraThumbsResize;
this.imageExtraThumbsResize = newValue;
firePropertyChange(IMAGE_EXTRATHUMBS_RESIZE, oldValue, newValue);
}
public void setImageExtraThumbsSize(int newValue) {
int oldValue = this.imageExtraThumbsSize;
this.imageExtraThumbsSize = newValue;
firePropertyChange(IMAGE_EXTRATHUMBS_SIZE, oldValue, newValue);
}
/** @return how many extrathumbs to fetch */
public int getImageExtraThumbsCount() {
return imageExtraThumbsCount;
}
public void setImageExtraThumbsCount(int newValue) {
int oldValue = this.imageExtraThumbsCount;
this.imageExtraThumbsCount = newValue;
firePropertyChange(IMAGE_EXTRATHUMBS_COUNT, oldValue, newValue);
}
/** @return how many extrafanarts to fetch */
public int getImageExtraFanartCount() {
return imageExtraFanartCount;
}
public void setImageExtraFanartCount(int newValue) {
int oldValue = this.imageExtraFanartCount;
this.imageExtraFanartCount = newValue;
firePropertyChange(IMAGE_EXTRAFANART_COUNT, oldValue, newValue);
}
/** @return true if extrafanart should be processed */
public boolean isImageExtraFanart() {
return imageExtraFanart;
}
public void setImageExtraThumbs(boolean newValue) {
boolean oldValue = this.imageExtraThumbs;
this.imageExtraThumbs = newValue;
firePropertyChange(IMAGE_EXTRATHUMBS, oldValue, newValue);
}
public void setImageExtraFanart(boolean newValue) {
boolean oldValue = this.imageExtraFanart;
this.imageExtraFanart = newValue;
firePropertyChange(IMAGE_EXTRAFANART, oldValue, newValue);
}
// --- movie set artwork settings: plain bound-bean accessors ---
/** @return true if movie set artwork should be stored in the movie folder */
public boolean isEnableMovieSetArtworkMovieFolder() {
return enableMovieSetArtworkMovieFolder;
}
public void setEnableMovieSetArtworkMovieFolder(boolean newValue) {
boolean oldValue = this.enableMovieSetArtworkMovieFolder;
this.enableMovieSetArtworkMovieFolder = newValue;
firePropertyChange(ENABLE_MOVIESET_ARTWORK_MOVIE_FOLDER, oldValue, newValue);
}
/** @return true if movie set artwork should be stored in the dedicated folder ({@link #getMovieSetArtworkFolder()}) */
public boolean isEnableMovieSetArtworkFolder() {
return enableMovieSetArtworkFolder;
}
public void setEnableMovieSetArtworkFolder(boolean newValue) {
boolean oldValue = this.enableMovieSetArtworkFolder;
this.enableMovieSetArtworkFolder = newValue;
firePropertyChange(ENABLE_MOVIESET_ARTWORK_FOLDER, oldValue, newValue);
}
/** @return the path of the dedicated movie set artwork folder */
public String getMovieSetArtworkFolder() {
return movieSetArtworkFolder;
}
public void setMovieSetArtworkFolder(String newValue) {
String oldValue = this.movieSetArtworkFolder;
this.movieSetArtworkFolder = newValue;
firePropertyChange(MOVIESET_ARTWORK_FOLDER, oldValue, newValue);
}
/** @return the NFO connector used to write movie metadata */
@XmlElement(name = MOVIE_CONNECTOR)
public MovieConnectors getMovieConnector() {
return movieConnector;
}
public void setMovieConnector(MovieConnectors newValue) {
MovieConnectors oldValue = this.movieConnector;
this.movieConnector = newValue;
firePropertyChange(MOVIE_CONNECTOR, oldValue, newValue);
}
/** @return the renamer template for the movie folder name */
@XmlElement(name = MOVIE_RENAMER_PATHNAME)
public String getMovieRenamerPathname() {
return movieRenamerPathname;
}
public void setMovieRenamerPathname(String newValue) {
String oldValue = this.movieRenamerPathname;
this.movieRenamerPathname = newValue;
firePropertyChange(MOVIE_RENAMER_PATHNAME, oldValue, newValue);
}
/** @return the renamer template for the movie file name */
@XmlElement(name = MOVIE_RENAMER_FILENAME)
public String getMovieRenamerFilename() {
return movieRenamerFilename;
}
public void setMovieRenamerFilename(String newValue) {
String oldValue = this.movieRenamerFilename;
this.movieRenamerFilename = newValue;
firePropertyChange(MOVIE_RENAMER_FILENAME, oldValue, newValue);
}
/** @return true if the renamer should replace spaces with {@link #getMovieRenamerSpaceReplacement()} */
@XmlElement(name = MOVIE_RENAMER_SPACE_SUBSTITUTION)
public boolean isMovieRenamerSpaceSubstitution() {
return movieRenamerSpaceSubstitution;
}
/**
 * Enables/disables space substitution in the renamer.
 * Fix: unlike every other setter in this class, this one did not fire a
 * property-change event, so bound listeners (UI) were never notified.
 */
public void setMovieRenamerSpaceSubstitution(boolean newValue) {
  boolean oldValue = this.movieRenamerSpaceSubstitution;
  this.movieRenamerSpaceSubstitution = newValue;
  firePropertyChange(MOVIE_RENAMER_SPACE_SUBSTITUTION, oldValue, newValue);
}
/** @return the string substituted for spaces when {@link #isMovieRenamerSpaceSubstitution()} is enabled */
@XmlElement(name = MOVIE_RENAMER_SPACE_REPLACEMENT)
public String getMovieRenamerSpaceReplacement() {
return movieRenamerSpaceReplacement;
}
/**
 * Sets the space replacement string used by the renamer.
 * Fix: fire a property-change event like every sibling setter, so bound
 * listeners (UI) pick up the new value.
 */
public void setMovieRenamerSpaceReplacement(String newValue) {
  String oldValue = this.movieRenamerSpaceReplacement;
  this.movieRenamerSpaceReplacement = newValue;
  firePropertyChange(MOVIE_RENAMER_SPACE_REPLACEMENT, oldValue, newValue);
}
/**
 * Returns the configured metadata scraper, falling back to TMDB when no
 * scraper has been configured yet.
 */
public MovieScrapers getMovieScraper() {
  return movieScraper != null ? movieScraper : MovieScrapers.TMDB;
}
public void setMovieScraper(MovieScrapers newValue) {
MovieScrapers oldValue = this.movieScraper;
this.movieScraper = newValue;
firePropertyChange(MOVIE_SCRAPER, oldValue, newValue);
}
/** @return true if IMDB should also be scraped for foreign-language data */
public boolean isImdbScrapeForeignLanguage() {
return imdbScrapeForeignLanguage;
}
public void setImdbScrapeForeignLanguage(boolean newValue) {
boolean oldValue = this.imdbScrapeForeignLanguage;
this.imdbScrapeForeignLanguage = newValue;
firePropertyChange(IMDB_SCRAPE_FOREIGN_LANGU, oldValue, newValue);
}
// --- artwork scraper toggles ---
public boolean isImageScraperTmdb() {
return imageScraperTmdb;
}
public boolean isImageScraperFanartTv() {
return imageScraperFanartTv;
}
public void setImageScraperTmdb(boolean newValue) {
boolean oldValue = this.imageScraperTmdb;
this.imageScraperTmdb = newValue;
firePropertyChange(IMAGE_SCRAPER_TMDB, oldValue, newValue);
}
public void setImageScraperFanartTv(boolean newValue) {
boolean oldValue = this.imageScraperFanartTv;
this.imageScraperFanartTv = newValue;
firePropertyChange(IMAGE_SCRAPER_FANART_TV, oldValue, newValue);
}
/** @return true if the "best" image should be picked automatically while scraping */
public boolean isScrapeBestImage() {
return scrapeBestImage;
}
public void setScrapeBestImage(boolean newValue) {
boolean oldValue = this.scrapeBestImage;
this.scrapeBestImage = newValue;
firePropertyChange(SCRAPE_BEST_IMAGE, oldValue, newValue);
}
// --- trailer scraper toggles ---
public boolean isTrailerScraperTmdb() {
return trailerScraperTmdb;
}
public boolean isTrailerScraperHdTrailers() {
return trailerScraperHdTrailers;
}
public void setTrailerScraperTmdb(boolean newValue) {
boolean oldValue = this.trailerScraperTmdb;
this.trailerScraperTmdb = newValue;
firePropertyChange(TRAILER_SCRAPER_TMDB, oldValue, newValue);
}
public void setTrailerScraperHdTrailers(boolean newValue) {
boolean oldValue = this.trailerScraperHdTrailers;
this.trailerScraperHdTrailers = newValue;
firePropertyChange(TRAILER_SCRAPER_HD_TRAILERS, oldValue, newValue);
}
public boolean isTrailerScraperOfdb() {
return trailerScraperOfdb;
}
public void setTrailerScraperOfdb(boolean newValue) {
boolean oldValue = this.trailerScraperOfdb;
this.trailerScraperOfdb = newValue;
firePropertyChange(TRAILER_SCRAPER_OFDB, oldValue, newValue);
}
/** @return true if actor images should be written to disk */
public boolean isWriteActorImages() {
return writeActorImages;
}
public void setWriteActorImages(boolean newValue) {
boolean oldValue = this.writeActorImages;
this.writeActorImages = newValue;
firePropertyChange(WRITE_ACTOR_IMAGES, oldValue, newValue);
}
/** @return the preferred metadata language for scraping */
@XmlElement(name = SCRAPER_LANGU)
public MediaLanguages getScraperLanguage() {
return scraperLanguage;
}
public void setScraperLanguage(MediaLanguages newValue) {
MediaLanguages oldValue = this.scraperLanguage;
this.scraperLanguage = newValue;
firePropertyChange(SCRAPER_LANGU, oldValue, newValue);
}
/** @return the country whose certification system should be used */
@XmlElement(name = CERTIFICATION_COUNTRY)
public CountryCode getCertificationCountry() {
return certificationCountry;
}
public void setCertificationCountry(CountryCode newValue) {
CountryCode oldValue = this.certificationCountry;
certificationCountry = newValue;
firePropertyChange(CERTIFICATION_COUNTRY, oldValue, newValue);
}
/** @return the minimum match score below which scrape results are rejected - TODO confirm exact semantics at the call sites */
@XmlElement(name = SCRAPER_THRESHOLD)
public double getScraperThreshold() {
return scraperThreshold;
}
public void setScraperThreshold(double newValue) {
double oldValue = this.scraperThreshold;
scraperThreshold = newValue;
firePropertyChange(SCRAPER_THRESHOLD, oldValue, newValue);
}
/** @return true if orphaned NFO files should be cleaned up while renaming */
@XmlElement(name = MOVIE_RENAMER_NFO_CLEANUP)
public boolean isMovieRenamerNfoCleanup() {
return movieRenamerNfoCleanup;
}
/**
 * Enables/disables NFO cleanup during renaming.
 * Fix: fire a property-change event like every sibling setter, so bound
 * listeners (UI) pick up the new value.
 */
public void setMovieRenamerNfoCleanup(boolean newValue) {
  boolean oldValue = this.movieRenamerNfoCleanup;
  this.movieRenamerNfoCleanup = newValue;
  firePropertyChange(MOVIE_RENAMER_NFO_CLEANUP, oldValue, newValue);
}
/**
 * Should we detect (and create) movies from directories containing more than one movie?
 *
 * @return true/false
 */
public boolean isDetectMovieMultiDir() {
return detectMovieMultiDir;
}
/**
 * Should we detect (and create) movies from directories containing more than one movie?
 *
 * @param newValue
 * true/false
 */
public void setDetectMovieMultiDir(boolean newValue) {
boolean oldValue = this.detectMovieMultiDir;
this.detectMovieMultiDir = newValue;
firePropertyChange(DETECT_MOVIE_MULTI_DIR, oldValue, newValue);
}
/** @return true if the image cache should be filled while importing */
public boolean isBuildImageCacheOnImport() {
return buildImageCacheOnImport;
}
public void setBuildImageCacheOnImport(boolean newValue) {
boolean oldValue = this.buildImageCacheOnImport;
this.buildImageCacheOnImport = newValue;
firePropertyChange(BUILD_IMAGE_CACHE_ON_IMPORT, oldValue, newValue);
}
/** @return true if the renamer should create a movie set structure even for a single movie */
public boolean isMovieRenamerCreateMoviesetForSingleMovie() {
return movieRenamerCreateMoviesetForSingleMovie;
}
public void setMovieRenamerCreateMoviesetForSingleMovie(boolean newValue) {
boolean oldValue = this.movieRenamerCreateMoviesetForSingleMovie;
this.movieRenamerCreateMoviesetForSingleMovie = newValue;
firePropertyChange(MOVIE_RENAMER_MOVIESET_SINGLE_MOVIE, oldValue, newValue);
}
/** @return true if the runtime should be taken from MediaInfo instead of the scraper */
public boolean isRuntimeFromMediaInfo() {
return runtimeFromMediaInfo;
}
public void setRuntimeFromMediaInfo(boolean newValue) {
boolean oldValue = this.runtimeFromMediaInfo;
this.runtimeFromMediaInfo = newValue;
firePropertyChange(RUNTIME_FROM_MI, oldValue, newValue);
}
/** @return true if non-ASCII characters should be replaced (presumably during renaming - confirm at call sites) */
public boolean isAsciiReplacement() {
return asciiReplacement;
}
public void setAsciiReplacement(boolean newValue) {
boolean oldValue = this.asciiReplacement;
this.asciiReplacement = newValue;
firePropertyChange(ASCII_REPLACEMENT, oldValue, newValue);
}
/**
 * Adds a bad word (stored lower-cased); duplicates are ignored.
 * NOTE(review): throws NPE for a null argument, and toLowerCase() uses the
 * default locale (Turkish-i pitfall) - confirm whether Locale.ROOT is wanted.
 */
public void addBadWord(String badWord) {
if (!badWords.contains(badWord.toLowerCase())) {
badWords.add(badWord.toLowerCase());
firePropertyChange(BAD_WORDS, null, badWords);
}
}
/** Removes a bad word (matched lower-cased) and always notifies listeners. */
public void removeBadWord(String badWord) {
badWords.remove(badWord.toLowerCase());
firePropertyChange(BAD_WORDS, null, badWords);
}
/**
 * Returns the bad word list, lower-casing every entry in place first.
 * NOTE(review): unlike getMovieFanartFilenames() this returns the LIVE
 * internal list, not a copy - presumably so XML binding / UI bindings can
 * populate it through the getter; confirm before changing to a copy.
 */
public List<String> getBadWords() {
// convert to lowercase for easy contains checking
ListIterator<String> iterator = badWords.listIterator();
while (iterator.hasNext()) {
iterator.set(iterator.next().toLowerCase());
}
return badWords;
}
// --- movie table column visibility: plain bound-bean accessors ---
public boolean isYearColumnVisible() {
return yearColumnVisible;
}
public void setYearColumnVisible(boolean newValue) {
boolean oldValue = this.yearColumnVisible;
this.yearColumnVisible = newValue;
firePropertyChange(YEAR_COLUMN_VISIBLE, oldValue, newValue);
}
public boolean isRatingColumnVisible() {
return ratingColumnVisible;
}
public void setRatingColumnVisible(boolean newValue) {
boolean oldValue = this.ratingColumnVisible;
this.ratingColumnVisible = newValue;
// NOTE(review): string literal instead of a *_COLUMN_VISIBLE constant like
// the sibling setters - confirm whether a constant exists for this property
firePropertyChange("ratingColumnVisible", oldValue, newValue);
}
public boolean isNfoColumnVisible() {
return nfoColumnVisible;
}
public void setNfoColumnVisible(boolean newValue) {
boolean oldValue = this.nfoColumnVisible;
this.nfoColumnVisible = newValue;
firePropertyChange(NFO_COLUMN_VISIBLE, oldValue, newValue);
}
public boolean isImageColumnVisible() {
return imageColumnVisible;
}
public void setImageColumnVisible(boolean newValue) {
boolean oldValue = this.imageColumnVisible;
this.imageColumnVisible = newValue;
firePropertyChange(IMAGE_COLUMN_VISIBLE, oldValue, newValue);
}
public boolean isTrailerColumnVisible() {
return trailerColumnVisible;
}
public void setTrailerColumnVisible(boolean newValue) {
boolean oldValue = this.trailerColumnVisible;
this.trailerColumnVisible = newValue;
firePropertyChange(TRAILER_COLUMN_VISIBLE, oldValue, newValue);
}
public boolean isSubtitleColumnVisible() {
return subtitleColumnVisible;
}
public void setSubtitleColumnVisible(boolean newValue) {
boolean oldValue = this.subtitleColumnVisible;
this.subtitleColumnVisible = newValue;
firePropertyChange(SUBTITLE_COLUMN_VISIBLE, oldValue, newValue);
}
public boolean isWatchedColumnVisible() {
return watchedColumnVisible;
}
public void setWatchedColumnVisible(boolean newValue) {
boolean oldValue = this.watchedColumnVisible;
this.watchedColumnVisible = newValue;
firePropertyChange(WATCHED_COLUMN_VISIBLE, oldValue, newValue);
}
/** @return true if a fallback scraper may be used - confirm exact fallback behavior at call sites */
public boolean isScraperFallback() {
return scraperFallback;
}
public void setScraperFallback(boolean newValue) {
boolean oldValue = this.scraperFallback;
this.scraperFallback = newValue;
firePropertyChange(SCRAPER_FALLBACK, oldValue, newValue);
}
// --- additional artwork type toggles (logo/banner/clearart/discart/thumb).
// These setters use string literal property names rather than constants;
// the literals match the bean property names.
public boolean isImageLogo() {
return imageLogo;
}
public boolean isImageBanner() {
return imageBanner;
}
public boolean isImageClearart() {
return imageClearart;
}
public boolean isImageDiscart() {
return imageDiscart;
}
public boolean isImageThumb() {
return imageThumb;
}
public void setImageLogo(boolean newValue) {
boolean oldValue = this.imageLogo;
this.imageLogo = newValue;
firePropertyChange("imageLogo", oldValue, newValue);
}
public void setImageBanner(boolean newValue) {
boolean oldValue = this.imageBanner;
this.imageBanner = newValue;
firePropertyChange("imageBanner", oldValue, newValue);
}
public void setImageClearart(boolean newValue) {
boolean oldValue = this.imageClearart;
this.imageClearart = newValue;
firePropertyChange("imageClearart", oldValue, newValue);
}
public void setImageDiscart(boolean newValue) {
boolean oldValue = this.imageDiscart;
this.imageDiscart = newValue;
firePropertyChange("imageDiscart", oldValue, newValue);
}
public void setImageThumb(boolean newValue) {
boolean oldValue = this.imageThumb;
this.imageThumb = newValue;
firePropertyChange("imageThumb", oldValue, newValue);
}
/** @return true if trailer quality/source preferences below should be honoured */
public boolean isUseTrailerPreference() {
return useTrailerPreference;
}
public void setUseTrailerPreference(boolean newValue) {
boolean oldValue = this.useTrailerPreference;
this.useTrailerPreference = newValue;
firePropertyChange("useTrailerPreference", oldValue, newValue);
}
public MovieTrailerQuality getTrailerQuality() {
return trailerQuality;
}
public void setTrailerQuality(MovieTrailerQuality newValue) {
MovieTrailerQuality oldValue = this.trailerQuality;
this.trailerQuality = newValue;
firePropertyChange("trailerQuality", oldValue, newValue);
}
public MovieTrailerSources getTrailerSource() {
return trailerSource;
}
public void setTrailerSource(MovieTrailerSources newValue) {
MovieTrailerSources oldValue = this.trailerSource;
this.trailerSource = newValue;
firePropertyChange("trailerSource", oldValue, newValue);
}
public void setSyncTrakt(boolean newValue) {
boolean oldValue = this.syncTrakt;
this.syncTrakt = newValue;
firePropertyChange("syncTrakt", oldValue, newValue);
}
// getter intentionally named getSyncTrakt (not isSyncTrakt) - callers and
// possibly serialization rely on this name, do not rename
public boolean getSyncTrakt() {
return syncTrakt;
}
/** @return true if artwork in the preferred language should be ranked first */
public boolean isImageLanguagePriority() {
return imageLanguagePriority;
}
public void setImageLanguagePriority(boolean newValue) {
boolean oldValue = this.imageLanguagePriority;
this.imageLanguagePriority = newValue;
firePropertyChange("imageLanguagePriority", oldValue, newValue);
}
}
| |
package org.commoncrawl.util.shared;
import java.text.ParseException;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.StringTokenizer;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Pseudo flexible HTTP date parser
*
* @author rana
*
*/
/**
 * Pseudo flexible HTTP date parser
 *
 * @author rana
 *
 */
public class DateUtils {

  /**
   * Tries a list of {@link SimpleDateFormat} patterns in order and returns the
   * first parse that consumes the complete input string.
   *
   * <p>NOTE(review): {@link SimpleDateFormat} is not thread-safe, so a
   * {@code DateParser} instance must not be shared across threads without
   * external synchronization.
   */
  public static class DateParser {

    SimpleDateFormat parsers[] = null;

    /**
     * @param patterns SimpleDateFormat patterns, tried in the given order
     */
    public DateParser(String[] patterns) {
      parsers = new SimpleDateFormat[patterns.length];
      int index = 0;
      for (String pattern : patterns) {
        parsers[index++] = new SimpleDateFormat(pattern);
      }
    }

    /**
     * Parses {@code str} with the first pattern that matches the whole input.
     *
     * @throws IllegalArgumentException if {@code str} is null
     * @throws ParseException if no pattern matches the complete input
     */
    public Date parseDate(String str) throws ParseException {
      if (str == null) {
        throw new IllegalArgumentException("Date and Patterns must not be null");
      }
      ParsePosition pos = new ParsePosition(0);
      for (SimpleDateFormat parser : parsers) {
        Date date = parser.parse(str, pos);
        // BUG FIX: the parse position must be inspected BEFORE it is reset.
        // The previous code called pos.setIndex(0) first, which made
        // pos.getIndex() == str.length() impossible for any non-empty input,
        // so parseDate() always threw.
        if (date != null && pos.getIndex() == str.length()) {
          return date;
        }
        pos.setIndex(0);
      }
      throw new ParseException("Unable to parse the date: " + str, -1);
    }
  }

  /** Lower-case three-letter month prefixes; index 0 = January. */
  static String kMonths[] = { "jan", "feb", "mar", "apr", "may", "jun", "jul", "aug", "sep", "oct", "nov", "dec" };

  /** Token delimiters; ':' is deliberately excluded to keep hh:mm:ss together. */
  static String kDelimiters = "\t !\"#$%&'()*+,-./;<=>?@[\\]^_`{|}~";

  /** Broken-down GMT time (the DCHECK/NOTREACHED notes below suggest this parser was ported from Chromium). */
  static class TimeExploded {
    int year; // Four digit year "2007"
    int month; // 1-based month (values 1 = January, etc.)
    int day_of_week; // 0-based day of week (0 = Sunday, etc.)
    int day_of_month; // 1-based day of month (1-31)
    int hour; // Hour within the current day (0-23)
    int minute; // Minute within the current hour (0-59)
    int second; // Second within the current minute (0-59 plus leap
    // seconds which may take it up to 60).
    int millisecond; // Milliseconds within the current second (0-999)
  }

  /** Matches "hh:mm:ss" at the start of a token (trailing junk tolerated). */
  static Pattern timePattern = Pattern.compile("(\\d+):(\\d+):(\\d+).*");

  static boolean isASCIIDigit(char c) {
    return (c >= '0') && (c <= '9');
  }

  // Parse a cookie expiration time. We try to be lenient, but we need to
  // assume some order to distinguish the fields. The basic rules:
  // - The month name must be present and prefix the first 3 letters of the
  // full month name (jan for January, jun for June).
  // - If the year is <= 2 digits, it must occur after the day of month.
  // - The time must be of the format hh:mm:ss.
  // An average cookie expiration will look something like this:
  // Sat, 15-Apr-17 21:01:22 GMT
  /**
   * Parses a lenient HTTP/cookie-style date.
   *
   * @param time_string the date string, e.g. "Sat, 15-Apr-17 21:01:22 GMT"
   * @return the GMT timestamp in milliseconds since the epoch, or -1 if a
   *         required field is missing or out of range
   */
  public static long parseHttpDate(String time_string) {
    int kMonthsLen = kMonths.length;
    // We want to be pretty liberal, and support most non-ascii and non-digit
    // characters as a delimiter. We can't treat : as a delimiter, because it
    // is the delimiter for hh:mm:ss, and we want to keep this field together.
    // We make sure to include - and +, since they could prefix numbers.
    // If the cookie attribute came in in quotes (ex expires="XXX"), the quotes
    // will be preserved, and we will get them here. So we make sure to include
    // quote characters, and also \ for anything that was internally escaped.
    TimeExploded exploded = new TimeExploded();
    StringTokenizer tokenizer = new StringTokenizer(time_string, kDelimiters);
    boolean found_day_of_month = false;
    boolean found_month = false;
    boolean found_time = false;
    boolean found_year = false;
    while (tokenizer.hasMoreTokens()) {
      String token = tokenizer.nextToken();
      // StringTokenizer never yields empty tokens, so charAt(0) is safe
      boolean numerical = isASCIIDigit(token.charAt(0));
      // String field
      if (!numerical) {
        if (!found_month) {
          String tokenLowerCase = token.toLowerCase();
          for (int i = 0; i < kMonthsLen; ++i) {
            // Match prefix, so we could match January, etc
            if (tokenLowerCase.startsWith(kMonths[i])) {
              exploded.month = i + 1;
              found_month = true;
              break;
            }
          }
        } else {
          // If we've gotten here, it means we've already found and parsed our
          // month, and we have another string, which we would expect to be the
          // the time zone name. According to the RFC and my experiments with
          // how sites format their expirations, we don't have much of a reason
          // to support timezones. We don't want to ever barf on user input,
          // but this DCHECK should pass for well-formed data.
          // DCHECK(token == "GMT");
        }
        // Numeric field w/ a colon
      } else if (token.indexOf(':') != -1) {
        if (!found_time) {
          Matcher m = timePattern.matcher(token);
          if (m.matches()) {
            try {
              short hour = Short.parseShort(m.group(1));
              short minute = Short.parseShort(m.group(2));
              short second = Short.parseShort(m.group(3));
              exploded.hour = hour;
              exploded.minute = minute;
              exploded.second = second;
              found_time = true;
            } catch (NumberFormatException e) {
              // ignored: best-effort parse, keep scanning for a usable time
            }
          }
        } else {
          // We should only ever encounter one time-like thing. If we're here,
          // it means we've found a second, which shouldn't happen. We keep
          // the first. This check should be ok for well-formed input:
          // NOTREACHED();
        }
        // Numeric field
      } else {
        // Overflow with atoi() is unspecified, so we enforce a max length.
        if (!found_day_of_month && token.length() <= 2) {
          try {
            exploded.day_of_month = Integer.parseInt(token);
            found_day_of_month = true;
          } catch (NumberFormatException e) {
            // ignored: best-effort parse, keep scanning
          }
        } else if (!found_year && token.length() <= 5) {
          try {
            exploded.year = Integer.parseInt(token);
            found_year = true;
          } catch (NumberFormatException e) {
            // ignored: best-effort parse, keep scanning
          }
        } else {
          // If we're here, it means we've either found an extra numeric field,
          // or a numeric field which was too long. For well-formed input, the
          // following check would be reasonable:
          // NOTREACHED();
        }
      }
    }
    if (!found_day_of_month || !found_month || !found_time || !found_year) {
      // We didn't find all of the fields we need. For well-formed input, the
      // following check would be reasonable:
      // NOTREACHED() << "Cookie parse expiration failed: " << time_string;
      return -1;
    }
    // Normalize the year to expand abbreviated years to the full year.
    if (exploded.year >= 69 && exploded.year <= 99)
      exploded.year += 1900;
    if (exploded.year >= 0 && exploded.year <= 68)
      exploded.year += 2000;
    // If our values are within their correct ranges, we got our time.
    if (exploded.day_of_month >= 1 && exploded.day_of_month <= 31 && exploded.month >= 1 && exploded.month <= 12
        && exploded.year >= 1601 && exploded.year <= 30827 && exploded.hour <= 23 && exploded.minute <= 59
        && exploded.second <= 59) {
      Calendar gmtCalendar = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
      gmtCalendar.set(exploded.year, exploded.month - 1, exploded.day_of_month, exploded.hour, exploded.minute,
          exploded.second);
      gmtCalendar.set(Calendar.MILLISECOND, 0);
      return gmtCalendar.getTimeInMillis();
    }
    // One of our values was out of expected range. For well-formed input,
    // the following check would be reasonable:
    // NOTREACHED() << "Cookie exploded expiration failed: " << time_string;
    return -1;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.jmeter.gui.action;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.io.File;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.prefs.Preferences;
import javax.swing.JComponent;
import javax.swing.JMenuItem;
/**
* Handles the loading of recent files, and also the content and
* visibility of menu items for loading the recent files
*/
public class LoadRecentProject extends Load {
    /** Prefix for the user preference key */
    private static final String USER_PREFS_KEY = "recent_file_"; //$NON-NLS-1$
    /** The number of menu items used for recent files */
    private static final int NUMBER_OF_MENU_ITEMS = 9;

    private static final Set<String> commands = new HashSet<>();
    static {
        commands.add(ActionNames.OPEN_RECENT);
    }

    private static final Preferences prefs = Preferences.userNodeForPackage(LoadRecentProject.class);
    // Note: Windows user preferences are stored relative to: HKEY_CURRENT_USER\Software\JavaSoft\Prefs

    public LoadRecentProject() {
        super();
    }

    @Override
    public Set<String> getActionNames() {
        return commands;
    }

    @Override
    public void doActionAfterCheck(ActionEvent e) {
        // We must ask the user if it is ok to close current project
        if (!Close.performAction(e)) {
            return;
        }
        // Load the file for this recent file command
        loadProjectFile(e, getRecentFile(e), false);
    }

    /**
     * Get the recent file for the menu item; the menu item's component name
     * carries the index into the recent-file preferences.
     */
    private File getRecentFile(ActionEvent e) {
        JMenuItem menuItem = (JMenuItem) e.getSource();
        // Get the preference for the recent files
        return new File(getRecentFile(Integer.parseInt(menuItem.getName())));
    }

    /**
     * Get the menu items to add to the menu bar, to get recent file functionality
     *
     * @return a List of JMenuItem, representing recent files. JMenuItem may not be visible
     */
    public static List<JComponent> getRecentFileMenuItems() {
        LinkedList<JComponent> menuItems = new LinkedList<>();
        for (int i = 0; i < NUMBER_OF_MENU_ITEMS; i++) {
            JMenuItem recentFile = new JMenuItem();
            // Use the index as the name, used when processing the action
            recentFile.setName(Integer.toString(i));
            recentFile.addActionListener(ActionRouter.getInstance());
            recentFile.setActionCommand(ActionNames.OPEN_RECENT);
            // Set the KeyStroke to use
            int shortKey = getShortcutKey(i);
            if (shortKey >= 0) {
                recentFile.setMnemonic(shortKey);
            }
            menuItems.add(recentFile);
        }
        // Update menu items to reflect recent files
        updateMenuItems(menuItems);
        return menuItems;
    }

    /**
     * Update the content and visibility of the menu items for recent files
     *
     * @param menuItems the JMenuItem to update
     * @param loadedFileName the file name of the project file that has just
     * been loaded
     */
    public static void updateRecentFileMenuItems(List<JComponent> menuItems, String loadedFileName) {
        // Get the preference for the recent files
        LinkedList<String> newRecentFiles = new LinkedList<>();
        // Check if the new file is already in the recent list
        boolean alreadyExists = false;
        for (int i = 0; i < NUMBER_OF_MENU_ITEMS; i++) {
            String recentFilePath = getRecentFile(i);
            if (!loadedFileName.equals(recentFilePath)) {
                newRecentFiles.add(recentFilePath);
            } else {
                alreadyExists = true;
            }
        }
        // Add the new file at the start of the list
        newRecentFiles.add(0, loadedFileName);
        // Remove the last item from the list if it was a brand new file,
        // keeping the list at exactly NUMBER_OF_MENU_ITEMS entries
        if (!alreadyExists) {
            newRecentFiles.removeLast();
        }
        // Store the recent files; null entries mean "no recent file stored for
        // this slot" and leave the previous preference value untouched
        for (int i = 0; i < NUMBER_OF_MENU_ITEMS; i++) {
            String fileName = newRecentFiles.get(i);
            if (fileName != null) {
                setRecentFile(i, fileName);
            }
        }
        // Update menu items to reflect recent files
        updateMenuItems(menuItems);
    }

    /**
     * Set the content and visibility of menu items and menu separator,
     * based on the recent file stored user preferences.
     */
    private static void updateMenuItems(List<JComponent> menuItems) {
        // Update the menu items
        for (int i = 0; i < NUMBER_OF_MENU_ITEMS; i++) {
            JMenuItem recentFile = (JMenuItem) menuItems.get(i);
            // Find and set the file for this recent file command
            String recentFilePath = getRecentFile(i);
            if (recentFilePath != null) {
                File file = new File(recentFilePath);
                // Index before file name
                String label = (i + 1) + " " + getMenuItemDisplayName(file); //$NON-NLS-1$
                recentFile.setText(label);
                recentFile.setToolTipText(recentFilePath);
                recentFile.setEnabled(true);
                recentFile.setVisible(true);
            } else {
                recentFile.setEnabled(false);
                recentFile.setVisible(false);
            }
        }
    }

    /**
     * Get the name to display in the menu item, it will chop the file name
     * if it is too long to display in the menu bar
     */
    private static String getMenuItemDisplayName(File file) {
        // Limit the length of the menu text if needed
        final int maxLength = 40;
        String menuText = file.getName();
        if (menuText.length() > maxLength) {
            menuText = "..." + menuText.substring(menuText.length() - maxLength); //$NON-NLS-1$
        }
        return menuText;
    }

    /**
     * Get the KeyEvent to use as shortcut key for menu item.
     * VK_1..VK_9 are consecutive constants mirroring ASCII '1'..'9'
     * (0x31..0x39), so the key can be computed instead of enumerated in a
     * nine-case switch.
     */
    private static int getShortcutKey(int index) {
        return (index >= 0 && index < NUMBER_OF_MENU_ITEMS) ? KeyEvent.VK_1 + index : -1;
    }

    /**
     * Get the full path to the recent file where index 0 is the most recent
     * @param index the index of the recent file
     * @return full path to the recent file at <code>index</code>, or null if none stored
     */
    public static String getRecentFile(int index) {
        return prefs.get(USER_PREFS_KEY + index, null);
    }

    /**
     * Set the full path to the recent file where index 0 is the most recent
     */
    private static void setRecentFile(int index, String fileName) {
        prefs.put(USER_PREFS_KEY + index, fileName);
    }

    /**
     * @param fileLoadRecentFiles List of JMenuItem
     * @return true if at least one JMenuItem is visible
     */
    public static boolean hasVisibleMenuItem(List<JComponent> fileLoadRecentFiles) {
        return fileLoadRecentFiles.stream()
                .anyMatch(JComponent::isVisible);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.yarn;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.SecurityOptions;
import org.apache.flink.runtime.security.SecurityConfiguration;
import org.apache.flink.runtime.security.SecurityUtils;
import org.apache.flink.runtime.security.contexts.HadoopSecurityContext;
import org.apache.flink.test.util.SecureTestEnvironment;
import org.apache.flink.test.util.TestingSecurityContext;
import org.apache.flink.yarn.configuration.YarnConfigOptions;
import org.apache.flink.yarn.util.TestHadoopModuleFactory;
import org.apache.flink.shaded.guava30.com.google.common.collect.Lists;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.hamcrest.Matchers;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
/**
* An extension of the {@link YARNSessionFIFOITCase} that runs the tests in a secured YARN cluster.
*/
public class YARNSessionFIFOSecuredITCase extends YARNSessionFIFOITCase {
protected static final Logger LOG = LoggerFactory.getLogger(YARNSessionFIFOSecuredITCase.class);
@BeforeClass
public static void setup() {
    LOG.info("starting secure cluster environment for testing");
    // Run the mini YARN cluster with a FIFO scheduler and small containers.
    YARN_CONFIGURATION.setClass(
            YarnConfiguration.RM_SCHEDULER, FifoScheduler.class, ResourceScheduler.class);
    YARN_CONFIGURATION.setInt(YarnConfiguration.NM_PMEM_MB, 768);
    YARN_CONFIGURATION.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 512);
    YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-fifo-secured");
    // Create the Kerberos test environment (principal + keytab) before the
    // YARN configuration references them.
    SecureTestEnvironment.prepare(tmp);
    populateYarnSecureConfigurations(
            YARN_CONFIGURATION,
            SecureTestEnvironment.getHadoopServicePrincipal(),
            SecureTestEnvironment.getTestKeytab());
    Configuration flinkConfig = new Configuration();
    flinkConfig.setString(
            SecurityOptions.KERBEROS_LOGIN_KEYTAB, SecureTestEnvironment.getTestKeytab());
    flinkConfig.setString(
            SecurityOptions.KERBEROS_LOGIN_PRINCIPAL,
            SecureTestEnvironment.getHadoopServicePrincipal());
    // Setting customized security module class.
    TestHadoopModuleFactory.hadoopConfiguration = YARN_CONFIGURATION;
    flinkConfig.set(
            SecurityOptions.SECURITY_MODULE_FACTORY_CLASSES,
            Collections.singletonList("org.apache.flink.yarn.util.TestHadoopModuleFactory"));
    flinkConfig.set(
            SecurityOptions.SECURITY_CONTEXT_FACTORY_CLASSES,
            Collections.singletonList(
                    "org.apache.flink.yarn.util.TestHadoopSecurityContextFactory"));
    SecurityConfiguration securityConfig = new SecurityConfiguration(flinkConfig);
    try {
        TestingSecurityContext.install(
                securityConfig, SecureTestEnvironment.getClientSecurityConfigurationMap());
        // This is needed to ensure that SecurityUtils are run within a ugi.doAs section
        // Since we already logged in here in @BeforeClass, even a no-op security context will
        // still work.
        Assert.assertTrue(
                "HadoopSecurityContext must be installed",
                SecurityUtils.getInstalledContext() instanceof HadoopSecurityContext);
        // Lambda instead of an anonymous Callable<Object>, matching the style
        // used elsewhere in this class.
        SecurityUtils.getInstalledContext()
                .runSecured(
                        () -> {
                            startYARNSecureMode(
                                    YARN_CONFIGURATION,
                                    SecureTestEnvironment.getHadoopServicePrincipal(),
                                    SecureTestEnvironment.getTestKeytab());
                            return null;
                        });
    } catch (Exception e) {
        // Fix: the message previously contained an SLF4J-style "{}" placeholder,
        // but RuntimeException does no formatting - the literal "{}" was shown.
        // The cause is propagated instead.
        throw new RuntimeException(
                "Exception occurred while setting up secure test context.", e);
    }
}
@AfterClass
public static void teardownSecureCluster() {
    LOG.info("tearing down secure cluster environment");
    // Releases the secure test environment prepared in setup() (KDC, keytabs, principals).
    SecureTestEnvironment.cleanup();
}
@Test(timeout = 60000) // timeout after a minute.
public void testDetachedModeSecureWithPreInstallKeytab() throws Exception {
    // Runs the detached-mode test with the keytab pre-installed on the YARN side,
    // i.e. keytab shipping by Flink is explicitly disabled.
    runTest(
            () -> {
                Map<String, String> securityProperties = new HashMap<>();
                if (SecureTestEnvironment.getTestKeytab() != null) {
                    // client login keytab
                    securityProperties.put(
                            SecurityOptions.KERBEROS_LOGIN_KEYTAB.key(),
                            SecureTestEnvironment.getTestKeytab());
                    // pre-install Yarn local keytab, since both reuse the same temporary folder
                    // "tmp"
                    securityProperties.put(
                            YarnConfigOptions.LOCALIZED_KEYTAB_PATH.key(),
                            SecureTestEnvironment.getTestKeytab());
                    // unset keytab localization
                    securityProperties.put(YarnConfigOptions.SHIP_LOCAL_KEYTAB.key(), "false");
                }
                if (SecureTestEnvironment.getHadoopServicePrincipal() != null) {
                    securityProperties.put(
                            SecurityOptions.KERBEROS_LOGIN_PRINCIPAL.key(),
                            SecureTestEnvironment.getHadoopServicePrincipal());
                }
                final ApplicationId applicationId = runDetachedModeTest(securityProperties);
                verifyResultContainsKerberosKeytab(applicationId);
            });
}
@Test(timeout = 60000) // timeout after a minute.
@Override
public void testDetachedMode() throws Exception {
    // Detached-mode run with client-side Kerberos login configured from the
    // secure test environment, then log verification of the resulting app.
    runTest(
            () -> {
                final String keytab = SecureTestEnvironment.getTestKeytab();
                final String principal = SecureTestEnvironment.getHadoopServicePrincipal();
                final Map<String, String> securityProps = new HashMap<>();
                if (keytab != null) {
                    securityProps.put(SecurityOptions.KERBEROS_LOGIN_KEYTAB.key(), keytab);
                }
                if (principal != null) {
                    securityProps.put(
                            SecurityOptions.KERBEROS_LOGIN_PRINCIPAL.key(), principal);
                }
                verifyResultContainsKerberosKeytab(runDetachedModeTest(securityProps));
            });
}
/**
 * Verifies that both JobManager and TaskManager of the given application logged a
 * successful keytab-based Kerberos login, and that only the JobManager container
 * carries an AMRMToken in its credentials.
 */
private static void verifyResultContainsKerberosKeytab(ApplicationId applicationId)
        throws Exception {
    final String[] mustHave = {"Login successful for user", "using keytab file"};
    final boolean jobManagerRunsWithKerberos =
            verifyStringsInNamedLogFiles(mustHave, applicationId, "jobmanager.log");
    final boolean taskManagerRunsWithKerberos =
            verifyStringsInNamedLogFiles(mustHave, applicationId, "taskmanager.log");
    // Assert.assertThat is deprecated since JUnit 4.13; plain assertTrue/assertFalse
    // keep the same messages without the Hamcrest indirection.
    Assert.assertTrue(
            "The JobManager and the TaskManager should both run with Kerberos.",
            jobManagerRunsWithKerberos && taskManagerRunsWithKerberos);
    final List<String> amRMTokens =
            Lists.newArrayList(AMRMTokenIdentifier.KIND_NAME.toString());
    final String jobmanagerContainerId = getContainerIdByLogName("jobmanager.log");
    final String taskmanagerContainerId = getContainerIdByLogName("taskmanager.log");
    final boolean jobmanagerWithAmRmToken =
            verifyTokenKindInContainerCredentials(amRMTokens, jobmanagerContainerId);
    final boolean taskmanagerWithAmRmToken =
            verifyTokenKindInContainerCredentials(amRMTokens, taskmanagerContainerId);
    Assert.assertTrue("The JobManager should have AMRMToken.", jobmanagerWithAmRmToken);
    Assert.assertFalse("The TaskManager should not have AMRMToken.", taskmanagerWithAmRmToken);
}
/* For secure cluster testing it is enough to run only one test; the remaining
 * inherited tests are overridden as no-ops to keep the overall build time minimal.
 */
@Override
public void testQueryCluster() {}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.state;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.apache.ambari.annotations.Experimental;
import org.apache.ambari.annotations.ExperimentalFeature;
import org.apache.ambari.server.controller.RepositoryResponse;
import org.apache.ambari.server.state.stack.RepoTag;
import org.apache.commons.lang.StringUtils;
import com.google.common.base.Function;
import com.google.common.base.Objects;
import com.google.common.base.Strings;
/**
 * Describes a single package repository of a stack for one OS family: its id,
 * name, base URL (current / default / latest-resolved), optional mirrors list,
 * and bookkeeping flags used by Ambari's repository management.
 */
public class RepositoryInfo {
    private String baseUrl;
    private String osType;
    private String repoId;
    private String repoName;
    private String distribution;
    private String components;
    private String mirrorsList;
    private String defaultBaseUrl;
    private String latestBaseUrl;
    // true when the base url was restored from a previously-saved value
    private boolean baseSaved = false;
    private boolean unique = false;
    private boolean ambariManagedRepositories = true;
    @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
        comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
    private List<String> applicableServices = new LinkedList<>();
    private Set<RepoTag> tags = new HashSet<>();

    /**
     * @return the baseUrl
     */
    public String getBaseUrl() {
        return baseUrl;
    }

    /**
     * @param baseUrl the baseUrl to set
     */
    public void setBaseUrl(String baseUrl) {
        this.baseUrl = baseUrl;
    }

    /**
     * @return the osType
     */
    public String getOsType() {
        return osType;
    }

    /**
     * @param osType the osType to set
     */
    public void setOsType(String osType) {
        this.osType = osType;
    }

    /**
     * @return the repoId
     */
    public String getRepoId() {
        return repoId;
    }

    /**
     * @param repoId the repoId to set
     */
    public void setRepoId(String repoId) {
        this.repoId = repoId;
    }

    /**
     * @return the repoName
     */
    public String getRepoName() {
        return repoName;
    }

    /**
     * @param repoName the repoName to set
     */
    public void setRepoName(String repoName) {
        this.repoName = repoName;
    }

    public String getDistribution() {
        return distribution;
    }

    public void setDistribution(String distribution) {
        this.distribution = distribution;
    }

    public String getComponents() {
        return components;
    }

    public void setComponents(String components) {
        this.components = components;
    }

    /**
     * @return the mirrorsList
     */
    public String getMirrorsList() {
        return mirrorsList;
    }

    /**
     * @param mirrorsList the mirrorsList to set
     */
    public void setMirrorsList(String mirrorsList) {
        this.mirrorsList = mirrorsList;
    }

    /**
     * @return the default base url
     */
    public String getDefaultBaseUrl() {
        return defaultBaseUrl;
    }

    /**
     * @param url the default base url to set
     */
    public void setDefaultBaseUrl(String url) {
        defaultBaseUrl = url;
    }

    /**
     * @return the latest determined base url
     */
    public String getLatestBaseUrl() {
        return latestBaseUrl;
    }

    /**
     * @param url the latest determined base url
     */
    public void setLatestBaseUrl(String url) {
        latestBaseUrl = url;
    }

    /**
     * @return if the base url was from a saved value
     */
    public boolean isBaseUrlFromSaved() {
        return baseSaved;
    }

    /**
     * Sets if the base url was from a saved value
     */
    public void setBaseUrlFromSaved(boolean saved) {
        baseSaved = saved;
    }

    @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
        comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
    public List<String> getApplicableServices() {
        return applicableServices;
    }

    @Experimental(feature = ExperimentalFeature.CUSTOM_SERVICE_REPOS,
        comment = "Remove logic for handling custom service repos after enabling multi-mpack cluster deployment")
    public void setApplicableServices(List<String> applicableServices) {
        this.applicableServices = applicableServices;
    }

    @Override
    public String toString() {
        // BUG FIX: the first field used to be prefixed with ", " producing
        // "[ repoInfo: , osType=..." — the stray comma is removed.
        return "[ repoInfo: "
            + "osType=" + osType
            + ", repoId=" + repoId
            + ", baseUrl=" + baseUrl
            + ", repoName=" + repoName
            + ", distribution=" + distribution
            + ", components=" + components
            + ", mirrorsList=" + mirrorsList
            + ", unique=" + unique
            + ", ambariManagedRepositories=" + ambariManagedRepositories
            + ", applicableServices=" + StringUtils.join(applicableServices, ",")
            + " ]";
    }

    // NOTE(review): equals/hashCode deliberately(?) omit unique, baseSaved,
    // applicableServices and tags — confirm before relying on full value equality.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        RepositoryInfo that = (RepositoryInfo) o;
        return Objects.equal(baseUrl, that.baseUrl) &&
            Objects.equal(osType, that.osType) &&
            Objects.equal(repoId, that.repoId) &&
            Objects.equal(repoName, that.repoName) &&
            Objects.equal(distribution, that.distribution) &&
            Objects.equal(components, that.components) &&
            Objects.equal(mirrorsList, that.mirrorsList) &&
            Objects.equal(defaultBaseUrl, that.defaultBaseUrl) &&
            Objects.equal(latestBaseUrl, that.latestBaseUrl) &&
            Objects.equal(ambariManagedRepositories, that.ambariManagedRepositories);
    }

    @Override
    public int hashCode() {
        return Objects.hashCode(baseUrl, osType, repoId, repoName, distribution, components, mirrorsList, defaultBaseUrl,
            latestBaseUrl, ambariManagedRepositories);
    }

    /**
     * @return a REST response object carrying this repository's fields
     */
    public RepositoryResponse convertToResponse() {
        return new RepositoryResponse(getBaseUrl(), getOsType(), getRepoId(), getRepoName(), getDistribution(),
            getComponents(), getMirrorsList(), getDefaultBaseUrl(), getLatestBaseUrl(), getApplicableServices(),
            getTags());
    }

    /**
     * A function that returns the repo name of any RepositoryInfo
     */
    public static final Function<RepositoryInfo, String> GET_REPO_NAME_FUNCTION = new Function<RepositoryInfo, String>() {
        @Override public String apply(RepositoryInfo input) {
            return input.repoName;
        }
    };

    /**
     * A function that returns the repoId of any RepositoryInfo
     */
    public static final Function<RepositoryInfo, String> GET_REPO_ID_FUNCTION = new Function<RepositoryInfo, String>() {
        @Override public String apply(RepositoryInfo input) {
            return input.repoId;
        }
    };

    /**
     * A function that returns the baseUrl of any RepositoryInfo, never null
     */
    public static final Function<RepositoryInfo, String> SAFE_GET_BASE_URL_FUNCTION = new Function<RepositoryInfo, String>() {
        @Override public String apply(RepositoryInfo input) {
            return Strings.nullToEmpty(input.baseUrl);
        }
    };

    /**
     * A function that returns the osType of any RepositoryInfo
     */
    public static final Function<RepositoryInfo, String> GET_OSTYPE_FUNCTION = new Function<RepositoryInfo, String>() {
        @Override public String apply(RepositoryInfo input) {
            return input.osType;
        }
    };

    /**
     * @return true if version of HDP that change with each release
     */
    public boolean isUnique() {
        return unique;
    }

    /**
     * @param unique set is version of HDP that change with each release
     */
    public void setUnique(boolean unique) {
        this.unique = unique;
    }

    /**
     * @return true if repositories managed by ambari
     */
    public boolean isAmbariManagedRepositories() {
        return ambariManagedRepositories;
    }

    /**
     * @param ambariManagedRepositories set is repositories managed by ambari
     */
    public void setAmbariManagedRepositories(boolean ambariManagedRepositories) {
        this.ambariManagedRepositories = ambariManagedRepositories;
    }

    /**
     * @return the tags for this repository
     */
    public Set<RepoTag> getTags() {
        return tags;
    }

    /**
     * @param repoTags the tags for this repository
     */
    public void setTags(Set<RepoTag> repoTags) {
        tags = repoTags;
    }
}
| |
package com.palookaville.bluelink;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Iterator;
import org.apache.http.HttpEntity;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.palookaville.bluelink.AsynchWebAction.AsyncWebAction;
import android.app.Activity;
import android.content.Context;
import android.os.AsyncTask;
/**
 * Singleton helper that fetches JSON from remote endpoints (currently dbpedia)
 * on a background AsyncTask, parses it, and pushes the result into the UI via
 * the supplied Activity.
 */
public class ServerLink {

    /** Logs the message and cause, then aborts by rethrowing as RuntimeException. */
    void crash(String msg, Throwable t) {
        System.out.println(msg);
        t.printStackTrace();
        throw new RuntimeException(msg, t);
    }

    /** One HTTP fetch: exec() runs off the UI thread, followUp() publishes the result. */
    public class Fetch implements AsyncWebAction {
        String result;
        String status = "OK";
        String query;
        Activity activity;
        String label;

        public Fetch(String query, Activity activity, String label) {
            this.query = query;
            this.activity = activity;
            this.label = label;
        }

        /* (non-Javadoc)
         * @see com.cj.votron.AsyncWebAction#exec()
         */
        @Override
        public void exec() {
            HttpURLConnection con = null;
            try {
                con = (HttpURLConnection) new URL(query).openConnection();
                String rawJsonStr = readStream(con.getInputStream());
                //result = jsoneriseObject(rawJsonStr);
                result = voterList(rawJsonStr);
            } catch (Exception e) {
                crash("Error: Exec crashed", e);
            } finally {
                // BUG FIX: the connection was never released before.
                if (con != null) {
                    con.disconnect();
                }
            }
        }

        /** Drains the stream into a String; returns what was read even on IOException. */
        private String readStream(InputStream in) {
            BufferedReader reader = null;
            StringBuilder sb = new StringBuilder();
            try {
                reader = new BufferedReader(new InputStreamReader(in));
                String line;
                while ((line = reader.readLine()) != null) {
                    sb.append(line);
                }
            } catch (IOException e) {
                e.printStackTrace();
            } finally {
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }
            return sb.toString();
        }

        /* (non-Javadoc)
         * @see com.cj.votron.AsyncWebAction#followUp()
         */
        @Override
        public void followUp() {
            System.out.println("DBG: Follow up");
            System.out.println(result);
            // Display must happen on the UI thread.
            activity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    ConfigActivity ca = (ConfigActivity) activity;
                    ca.display(result);
                    //mTextViewStrength.append(" " + getString(R.string.disconnected));
                }
            });
            Config.getInstance().setParam(label, result);
            ConfigActivity.displayBuffer = result;
        }

        /* (non-Javadoc)
         * @see com.cj.votron.AsyncWebAction#getStatus()
         */
        @Override
        public String getStatus() { return status; }

        /* (non-Javadoc)
         * @see com.cj.votron.AsyncWebAction#getResult()
         */
        @Override
        public String getResult() { return result; }
    }

    String buffer;
    Activity currentActivity;
    private final static String URL_TARGET = "";
    static ServerLink instance = new ServerLink();

    public static ServerLink getInstance() { return instance; }

    /**************************************************
     *
     * Election tests
     *
     **************************************************/
    // void getElectionQuery(String query, Activity activity) {
    //     Fetch fetch = new Fetch(Config.SERVER + "/" + query, activity, "elections");
    //     asyncAction(activity,fetch);
    //     return;
    // }

    /**************************************************
     *
     * Dbpedia tests
     *
     **************************************************/
    private static final String DBPEDIAQ = "http://dbpedia.org/sparql?default-graph-uri=http%3A%2F%2Fdbpedia.org&query=";
    private static final String DPEDIA_JSONSPEC = "&format=json";

    /** Fires a dbpedia SPARQL query asynchronously; result is shown by Fetch.followUp(). */
    void getDbpediaQuery(String query, Activity activity) {
        Fetch fetch = new Fetch(DBPEDIAQ + query + DPEDIA_JSONSPEC, activity, "dbpedia");
        asyncAction(activity, fetch);
    }

    /** Runs fetch.exec()/followUp() on an AsyncTask background thread. */
    void asyncAction(final Activity activity, final Fetch fetch) {
        new AsyncTask<Context, Void, Void>() {
            @Override
            protected Void doInBackground(Context... backgroundListOfParameters) {
                try {
                    fetch.exec();
                    fetch.followUp();
                } catch (Exception e) {
                    crash("Error: Async crashed", e);
                }
                return null;
            }
        }.execute();
    }

    /** Reads the entity body into a String using the platform default charset. */
    String getASCIIContentFromEntity(HttpEntity entity)
            throws IllegalStateException, IOException {
        InputStream in = entity.getContent();
        // StringBuilder instead of the synchronized StringBuffer — single-threaded use.
        StringBuilder out = new StringBuilder();
        int n = 1;
        while (n > 0) {
            byte[] b = new byte[4096];
            n = in.read(b);
            if (n > 0)
                out.append(new String(b, 0, n));
        }
        return out.toString();
    }

    private void debug(String msg) {
        // TODO: FOR DEBUGGING
        // Log.d(getClass().getName(), msg);
    }

    /***********************************************
     *
     * JSON processing
     *
     ************************************************/
    /** Debug helper: dumps the top-level keys of a JSON object, returns it re-serialized. */
    String jsoneriseObject(String rawJsonStr) {
        String cleanJson = "WTF?";
        try {
            JSONObject jsonObj = new JSONObject(rawJsonStr);
            Iterator ks = jsonObj.keys();
            Integer i = 0;
            while (ks.hasNext()) {
                Object o = ks.next();
                String str = o.toString();
                String cls = o.getClass().toString();
                System.out.println(">" + i + ":." + cls + "=" + str);
                System.out.println();
                i++; // BUG FIX: counter was never incremented, every key printed as ">0"
            }
            System.out.println("DBG: Maybe we got some?");
            cleanJson = jsonObj.toString();
            System.out.println(cleanJson);
        } catch (JSONException e) {
            e.printStackTrace();
        }
        return cleanJson;
    }

    /** Parses a JSON array of objects and joins their "name" fields with trailing commas. */
    String voterList(String rawJsonStr) {
        StringBuilder sb = new StringBuilder();
        try {
            JSONArray arr = new JSONArray(rawJsonStr);
            for (int i = 0; i < arr.length(); i++) {
                JSONObject obj = arr.getJSONObject(i);
                String voterName = (String) obj.get("name");
                sb.append(voterName).append(",");
                System.out.println(voterName);
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }
        return sb.toString();
    }

    // NOTE(review): this only prints the array elements and always returns the
    // placeholder string — it appears to be an unfinished stub; confirm before use.
    String electionList(String rawJsonStr) {
        String cleanJson = "WTF?";
        try {
            JSONArray arr = new JSONArray(rawJsonStr);
            for (int i = 0; i < arr.length(); i++) {
                JSONObject obj = arr.getJSONObject(i);
                String str = arr.getString(i);
                System.out.println(str);
            }
        } catch (JSONException e) {
            e.printStackTrace();
        }
        return cleanJson;
    }
}
| |
/*******************************************************************************
 * Copyright (c) 2009 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* Zend Technologies
*******************************************************************************/
package org.eclipse.freemarker.internal.core.documentModel.parser;
import java.io.Reader;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.wst.sse.core.internal.ltk.parser.BlockTokenizer;
import org.eclipse.wst.sse.core.internal.ltk.parser.RegionParser;
import org.eclipse.wst.sse.core.internal.provisional.text.IStructuredDocumentRegion;
import org.eclipse.wst.sse.core.internal.provisional.text.ITextRegion;
import org.eclipse.wst.sse.core.internal.provisional.text.ITextRegionContainer;
import org.eclipse.wst.sse.core.internal.util.Debug;
import org.eclipse.wst.xml.core.internal.parser.XMLSourceParser;
import org.eclipse.wst.xml.core.internal.regions.DOMRegionContext;
/**
 * Source parser for FreeMarker documents, built on the WTP XML source parser.
 * parseNodes() groups the tokenizer's flat region stream into
 * IStructuredDocumentRegions and rebases each region's offset to be relative
 * to its enclosing node. Adapted from a PHP/JSP implementation, which is why
 * a JSPTokenizer and PHP_* context constants still appear below.
 */
public class FMSourceParser extends XMLSourceParser {
    // Hand-off slot: callers set the resource being edited before constructing the parser.
    public static ThreadLocal<IResource> editFile = new ThreadLocal<IResource>();
    private IProject project;

    public FMSourceParser() {
        super();
        // Resolve the owning project from whatever resource was handed over via editFile.
        IResource resource = (IResource) editFile.get();
        if (resource instanceof IProject) {
            project = (IProject) resource;
        } else if (resource instanceof IFile) {
            project = ((IFile) resource).getProject();
        }
    }

    /*
     * Change the tokenizer used by the XMLSourceParser; a JSPTokenizer is used
     * here (inherited from the PHP/JSP code this parser was adapted from).
     */
    public BlockTokenizer getTokenizer() {
        if (fTokenizer == null) {
            JSPTokenizer phpTokenizer = new JSPTokenizer();
            //phpTokenizer.setProject(project);
            fTokenizer = phpTokenizer;
        }
        return fTokenizer;
    }

    // Creates a sibling parser sharing a fresh tokenizer instance.
    public RegionParser newInstance() {
        FMSourceParser newInstance = new FMSourceParser();
        JSPTokenizer tokenizer = (JSPTokenizer) getTokenizer().newInstance();
        //tokenizer.setProject(project);
        newInstance.setTokenizer(tokenizer);
        return newInstance;
    }

    // Linked-list state built up during parseNodes(); reset on each call.
    private IStructuredDocumentRegion headNode = null;
    private IStructuredDocumentRegion lastNode = null;
    private IStructuredDocumentRegion currentNode = null;

    /**
     * Consumes all regions from the tokenizer and links them into a chain of
     * structured document regions, returning the head of the chain.
     * Order of the setLength/addRegion/adjustStart calls is significant:
     * regions arrive with absolute document offsets and are rebased to be
     * relative to the node that absorbs them.
     */
    protected IStructuredDocumentRegion parseNodes() {
        // regions are initially reported as complete offsets within the
        // scanned input; they are adjusted here to be indexes from the
        // currentNode's start offset
        // reset the state
        headNode = lastNode = currentNode = null;
        ITextRegion region = null;
        String type = null;
        while ((region = getNextRegion()) != null) {
            type = region.getType();
            // these types (might) demand a IStructuredDocumentRegion for each
            // of them
            if (type == DOMRegionContext.BLOCK_TEXT) {
                if (currentNode != null
                        && currentNode.getLastRegion().getType() == DOMRegionContext.BLOCK_TEXT) {
                    // multiple block texts indicated embedded containers; no
                    // new IStructuredDocumentRegion
                    currentNode.addRegion(region);
                    currentNode.setLength(region.getStart()
                            + region.getLength() - currentNode.getStart());
                    region.adjustStart(-currentNode.getStart());
                    // DW 4/16/2003 regions no longer have parents
                    if (region instanceof ITextRegionContainer) {
                        ((ITextRegionContainer) region).setParent(currentNode);
                    }
                } else {
                    // not continuing a IStructuredDocumentRegion
                    if (currentNode != null) {
                        // ensure that any existing node is at least
                        // terminated
                        if (!currentNode.isEnded()) {
                            currentNode.setLength(region.getStart()
                                    - currentNode.getStart());
                        }
                        lastNode = currentNode;
                    }
                    // NOTE(review): on the very first node this fires with a
                    // null argument — presumably the listener tolerates null; confirm.
                    fireNodeParsed(currentNode);
                    currentNode = createStructuredDocumentRegion(type);
                    if (lastNode != null) {
                        lastNode.setNext(currentNode);
                    }
                    currentNode.setPrevious(lastNode);
                    currentNode.setStart(region.getStart());
                    currentNode.setLength(region.getLength());
                    currentNode.setEnded(true);
                    region.adjustStart(-currentNode.getStart());
                    currentNode.addRegion(region);
                    // DW 4/16/2003 regions no longer have parents
                    if (region instanceof ITextRegionContainer) {
                        ((ITextRegionContainer) region).setParent(currentNode);
                    }
                }
            }
            // the following contexts OPEN new StructuredDocumentRegions
            else if ((currentNode != null && currentNode.isEnded())
                    || (type == FMRegionContext.FM_DIRECTIVE_START)
                    || (type == DOMRegionContext.XML_CONTENT)
                    || (type == DOMRegionContext.XML_CHAR_REFERENCE)
                    || (type == DOMRegionContext.XML_ENTITY_REFERENCE)
                    || (type == DOMRegionContext.XML_TAG_OPEN)
                    || (type == DOMRegionContext.XML_END_TAG_OPEN)
                    || (type == DOMRegionContext.XML_COMMENT_OPEN)
                    || (type == DOMRegionContext.XML_CDATA_OPEN)
                    || (type == DOMRegionContext.XML_DECLARATION_OPEN)) {
                if (currentNode != null) {
                    // ensure that any existing node is at least terminated
                    if (!currentNode.isEnded()) {
                        currentNode.setLength(region.getStart()
                                - currentNode.getStart());
                    }
                    lastNode = currentNode;
                }
                fireNodeParsed(currentNode);
                currentNode = createStructuredDocumentRegion(type);
                if (lastNode != null) {
                    lastNode.setNext(currentNode);
                }
                currentNode.setPrevious(lastNode);
                currentNode.setStart(region.getStart());
                currentNode.addRegion(region);
                currentNode.setLength(region.getStart() + region.getLength()
                        - currentNode.getStart());
                region.adjustStart(-currentNode.getStart());
                // DW 4/16/2003 regions no longer have parents
                if (region instanceof ITextRegionContainer) {
                    ((ITextRegionContainer) region).setParent(currentNode);
                }
            }
            // the following contexts neither open nor close
            // StructuredDocumentRegions; just add to them
            else if ((type == DOMRegionContext.XML_TAG_NAME)
                    || (type == DOMRegionContext.XML_TAG_ATTRIBUTE_NAME)
                    || (type == DOMRegionContext.XML_TAG_ATTRIBUTE_EQUALS)
                    || (type == DOMRegionContext.XML_TAG_ATTRIBUTE_VALUE)
                    || (type == DOMRegionContext.XML_COMMENT_TEXT)
                    || (type == DOMRegionContext.XML_PI_CONTENT)
                    || (type == DOMRegionContext.XML_DOCTYPE_INTERNAL_SUBSET)
                    || (type == FMRegionContext.PHP_CONTENT)) {
                currentNode.addRegion(region);
                currentNode.setLength(region.getStart() + region.getLength()
                        - currentNode.getStart());
                region.adjustStart(-currentNode.getStart());
                // DW 4/16/2003 regions no longer have parents
                if (region instanceof ITextRegionContainer) {
                    ((ITextRegionContainer) region).setParent(currentNode);
                }
            }
            // the following contexts close off StructuredDocumentRegions
            // cleanly
            else if ((type == FMRegionContext.PHP_CLOSE)
                    || (type == DOMRegionContext.XML_PI_CLOSE)
                    || (type == DOMRegionContext.XML_TAG_CLOSE)
                    || (type == DOMRegionContext.XML_EMPTY_TAG_CLOSE)
                    || (type == DOMRegionContext.XML_COMMENT_CLOSE)
                    || (type == DOMRegionContext.XML_DECLARATION_CLOSE)
                    || (type == DOMRegionContext.XML_CDATA_CLOSE)) {
                currentNode.setEnded(true);
                currentNode.setLength(region.getStart() + region.getLength()
                        - currentNode.getStart());
                currentNode.addRegion(region);
                region.adjustStart(-currentNode.getStart());
                // DW 4/16/2003 regions no longer have parents
                if (region instanceof ITextRegionContainer) {
                    ((ITextRegionContainer) region).setParent(currentNode);
                }
            }
            // this is extremely rare, but valid
            else if (type == DOMRegionContext.WHITE_SPACE) {
                ITextRegion lastRegion = currentNode.getLastRegion();
                // pack the embedded container with this region
                if (lastRegion instanceof ITextRegionContainer) {
                    ITextRegionContainer container = (ITextRegionContainer) lastRegion;
                    container.getRegions().add(region);
                    // containers must have parent set; setting for EACH
                    // subregion is redundant, but not sure where else to do it
                    container.setParent(currentNode);
                    if (region instanceof ITextRegionContainer) {
                        ((ITextRegionContainer) region).setParent(currentNode);
                    }
                    // NOTE(review): sign convention differs from the
                    // -currentNode.getStart() rebasing used elsewhere — confirm
                    // this container-relative adjustment is intended.
                    region.adjustStart(container.getLength()
                            - region.getStart());
                }
                currentNode.getLastRegion().adjustLength(region.getLength());
                currentNode.adjustLength(region.getLength());
            } else if (type == DOMRegionContext.UNDEFINED
                    && currentNode != null) {
                // skip on a very-first region situation as the default
                // behavior is good enough
                // combine with previous if also undefined
                if (currentNode.getLastRegion() != null
                        && currentNode.getLastRegion().getType() == DOMRegionContext.UNDEFINED) {
                    currentNode.getLastRegion()
                            .adjustLength(region.getLength());
                    currentNode.adjustLength(region.getLength());
                }
                // previous wasn't undefined
                else {
                    currentNode.addRegion(region);
                    currentNode.setLength(region.getStart()
                            + region.getLength() - currentNode.getStart());
                    region.adjustStart(-currentNode.getStart());
                }
            } else {
                // if an unknown type is the first region in the document,
                // ensure that a node exists
                if (currentNode == null) {
                    currentNode = createStructuredDocumentRegion(type);
                    currentNode.setStart(region.getStart());
                }
                currentNode.addRegion(region);
                currentNode.setLength(region.getStart() + region.getLength()
                        - currentNode.getStart());
                region.adjustStart(-currentNode.getStart());
                // DW 4/16/2003 regions no longer have parents
                if (region instanceof ITextRegionContainer) {
                    ((ITextRegionContainer) region).setParent(currentNode);
                }
                if (Debug.debugTokenizer)
                    System.out
                            .println(getClass().getName()
                                    + " found region of not specifically handled type " + region.getType() + " @ " + region.getStart() + "[" + region.getLength() + "]"); //$NON-NLS-4$//$NON-NLS-3$//$NON-NLS-2$//$NON-NLS-1$
            }
            // these regions also get their own node, so close them cleanly
            // NOTE: these regions have new StructuredDocumentRegions created
            // for them above; it may be more readable if that is handled here
            // as well, but the current layout ensures that they open
            // StructuredDocumentRegions the same way
            if ((type == DOMRegionContext.XML_CONTENT)
                    || (type == DOMRegionContext.XML_CHAR_REFERENCE)
                    || (type == DOMRegionContext.XML_ENTITY_REFERENCE)
                    || (type == FMRegionContext.PHP_CLOSE)) {
                currentNode.setEnded(true);
            }
            if (headNode == null && currentNode != null) {
                headNode = currentNode;
            }
        }
        // flush the final node and terminate the chain
        if (currentNode != null) {
            fireNodeParsed(currentNode);
            currentNode.setPrevious(lastNode);
        }
        // fStringInput = null;
        primReset();
        return headNode;
    }

    public void reset(Reader reader, int position) {
        super.reset(reader, position);
    }
}
| |
/* __ __ __ __ __ ___
* \ \ / / \ \ / / __/
* \ \/ / /\ \ \/ / /
* \____/__/ \__\____/__/
*
* Copyright 2014-2017 Vavr, http://vavr.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.vavr;
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-*\
G E N E R A T O R C R A F T E D
\*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-*/
import io.vavr.collection.Iterator;
import io.vavr.collection.List;
import io.vavr.collection.Seq;
import io.vavr.control.HashCodes;
import java.io.Serializable;
import java.util.Comparator;
import java.util.Objects;
import java.util.function.Function;
/**
* A tuple of three elements which can be seen as cartesian product of three components.
*
* @param <T1> type of the 1st element
* @param <T2> type of the 2nd element
* @param <T3> type of the 3rd element
* @author Daniel Dietrich
*/
public final class Tuple3<T1, T2, T3> implements Tuple, Comparable<Tuple3<T1, T2, T3>>, Serializable {
private static final long serialVersionUID = 1L;
/**
* The 1st element of this tuple.
*/
public final T1 _1;
/**
* The 2nd element of this tuple.
*/
public final T2 _2;
/**
* The 3rd element of this tuple.
*/
public final T3 _3;
/**
* Constructs a tuple of three elements.
*
* @param t1 the 1st element
* @param t2 the 2nd element
* @param t3 the 3rd element
*/
public Tuple3(T1 t1, T2 t2, T3 t3) {
    // Plain field assignment; no defensive copies — the fields are public final.
    this._1 = t1;
    this._2 = t2;
    this._3 = t3;
}
/**
 * Returns a lexicographic comparator for {@code Tuple3}: components are compared
 * in order with the supplied comparators, stopping at the first non-zero result.
 * The intersection cast makes the returned lambda {@link Serializable}.
 *
 * @param t1Comp comparator for the 1st component
 * @param t2Comp comparator for the 2nd component
 * @param t3Comp comparator for the 3rd component
 * @return a serializable lexicographic comparator
 */
public static <T1, T2, T3> Comparator<Tuple3<T1, T2, T3>> comparator(Comparator<? super T1> t1Comp, Comparator<? super T2> t2Comp, Comparator<? super T3> t3Comp) {
    return (Comparator<Tuple3<T1, T2, T3>> & Serializable) (t1, t2) -> {
        final int check1 = t1Comp.compare(t1._1, t2._1);
        if (check1 != 0) {
            return check1;
        }
        final int check2 = t2Comp.compare(t1._2, t2._2);
        if (check2 != 0) {
            return check2;
        }
        final int check3 = t3Comp.compare(t1._3, t2._3);
        if (check3 != 0) {
            return check3;
        }
        // all components are equal
        return 0;
    };
}
// Lexicographic natural-order comparison; the unchecked casts assume each
// component type is Comparable (throws ClassCastException otherwise at compare time).
@SuppressWarnings("unchecked")
private static <U1 extends Comparable<? super U1>, U2 extends Comparable<? super U2>, U3 extends Comparable<? super U3>> int compareTo(Tuple3<?, ?, ?> o1, Tuple3<?, ?, ?> o2) {
    final Tuple3<U1, U2, U3> t1 = (Tuple3<U1, U2, U3>) o1;
    final Tuple3<U1, U2, U3> t2 = (Tuple3<U1, U2, U3>) o2;
    final int check1 = t1._1.compareTo(t2._1);
    if (check1 != 0) {
        return check1;
    }
    final int check2 = t1._2.compareTo(t2._2);
    if (check2 != 0) {
        return check2;
    }
    final int check3 = t1._3.compareTo(t2._3);
    if (check3 != 0) {
        return check3;
    }
    // all components are equal
    return 0;
}
// A Tuple3 always has exactly three components.
@Override
public int arity() {
    return 3;
}
// Delegates to the static lexicographic comparison above.
@Override
public int compareTo(Tuple3<T1, T2, T3> that) {
    return Tuple3.compareTo(this, that);
}
/**
 * Getter of the 1st element of this tuple; mirrors the public final field {@code _1}.
 *
 * @return the 1st element of this Tuple.
 */
public T1 _1() {
    return _1;
}
/**
 * Returns a copy of this tuple with the 1st element replaced by the given
 * {@code value}; this tuple itself is unchanged (all fields are final).
 *
 * @param value the new value
 * @return a copy of this tuple with a new value for the 1st element of this Tuple.
 */
public Tuple3<T1, T2, T3> update1(T1 value) {
    return new Tuple3<>(value, _2, _3);
}
/**
 * Getter of the 2nd element of this tuple; mirrors the public final field {@code _2}.
 *
 * @return the 2nd element of this Tuple.
 */
public T2 _2() {
    return _2;
}
/**
 * Returns a copy of this tuple with the 2nd element replaced by the given
 * {@code value}; this tuple itself is unchanged (all fields are final).
 *
 * @param value the new value
 * @return a copy of this tuple with a new value for the 2nd element of this Tuple.
 */
public Tuple3<T1, T2, T3> update2(T2 value) {
    return new Tuple3<>(_1, value, _3);
}
/**
 * Getter of the 3rd element of this tuple; mirrors the public final field {@code _3}.
 *
 * @return the 3rd element of this Tuple.
 */
public T3 _3() {
    return _3;
}
/**
 * Returns a copy of this tuple with the 3rd element replaced by the given
 * {@code value}; this tuple itself is unchanged (all fields are final).
 *
 * @param value the new value
 * @return a copy of this tuple with a new value for the 3rd element of this Tuple.
 */
public Tuple3<T1, T2, T3> update3(T3 value) {
    return new Tuple3<>(_1, _2, value);
}
/**
* Maps the components of this tuple using a mapper function.
*
* @param mapper the mapper function
* @param <U1> new type of the 1st component
* @param <U2> new type of the 2nd component
* @param <U3> new type of the 3rd component
* @return A new Tuple of same arity.
* @throws NullPointerException if {@code mapper} is null
*/
public <U1, U2, U3> Tuple3<U1, U2, U3> map(Function3<? super T1, ? super T2, ? super T3, Tuple3<U1, U2, U3>> mapper) {
Objects.requireNonNull(mapper, "mapper is null");
return mapper.apply(_1, _2, _3);
}
/**
* Maps the components of this tuple using a mapper function for each component.
*
* @param f1 the mapper function of the 1st component
* @param f2 the mapper function of the 2nd component
* @param f3 the mapper function of the 3rd component
* @param <U1> new type of the 1st component
* @param <U2> new type of the 2nd component
* @param <U3> new type of the 3rd component
* @return A new Tuple of same arity.
* @throws NullPointerException if one of the arguments is null
*/
public <U1, U2, U3> Tuple3<U1, U2, U3> map(Function<? super T1, ? extends U1> f1, Function<? super T2, ? extends U2> f2, Function<? super T3, ? extends U3> f3) {
Objects.requireNonNull(f1, "f1 is null");
Objects.requireNonNull(f2, "f2 is null");
Objects.requireNonNull(f3, "f3 is null");
return Tuple.of(f1.apply(_1), f2.apply(_2), f3.apply(_3));
}
/**
* Maps the 1st component of this tuple to a new value.
*
* @param <U> new type of the 1st component
* @param mapper A mapping function
* @return a new tuple based on this tuple and substituted 1st component
*/
public <U> Tuple3<U, T2, T3> map1(Function<? super T1, ? extends U> mapper) {
Objects.requireNonNull(mapper, "mapper is null");
final U u = mapper.apply(_1);
return Tuple.of(u, _2, _3);
}
/**
* Maps the 2nd component of this tuple to a new value.
*
* @param <U> new type of the 2nd component
* @param mapper A mapping function
* @return a new tuple based on this tuple and substituted 2nd component
*/
public <U> Tuple3<T1, U, T3> map2(Function<? super T2, ? extends U> mapper) {
Objects.requireNonNull(mapper, "mapper is null");
final U u = mapper.apply(_2);
return Tuple.of(_1, u, _3);
}
/**
* Maps the 3rd component of this tuple to a new value.
*
* @param <U> new type of the 3rd component
* @param mapper A mapping function
* @return a new tuple based on this tuple and substituted 3rd component
*/
public <U> Tuple3<T1, T2, U> map3(Function<? super T3, ? extends U> mapper) {
Objects.requireNonNull(mapper, "mapper is null");
final U u = mapper.apply(_3);
return Tuple.of(_1, _2, u);
}
/**
* Transforms this tuple to an object of type U.
*
* @param f Transformation which creates a new object of type U based on this tuple's contents.
* @param <U> type of the transformation result
* @return An object of type U
* @throws NullPointerException if {@code f} is null
*/
public <U> U apply(Function3<? super T1, ? super T2, ? super T3, ? extends U> f) {
Objects.requireNonNull(f, "f is null");
return f.apply(_1, _2, _3);
}
@Override
public Seq<?> toSeq() {
return List.of(_1, _2, _3);
}
// -- Object
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
} else if (!(o instanceof Tuple3)) {
return false;
} else {
final Tuple3<?, ?, ?> that = (Tuple3<?, ?, ?>) o;
return Objects.equals(this._1, that._1)
&& Objects.equals(this._2, that._2)
&& Objects.equals(this._3, that._3);
}
}
    // Combines all three components via the project's HashCodes helper so that
    // tuples equal per equals() hash to the same value. Must stay in sync with
    // equals(Object) above.
    @Override
    public int hashCode() {
        return HashCodes.hash(_1, _2, _3);
    }
@Override
public String toString() {
return "(" + _1 + ", " + _2 + ", " + _3 + ")";
}
}
| |
//@@author A0147984L
package seedu.address.model.task;
import java.util.Objects;
import seedu.address.commons.exceptions.IllegalValueException;
import seedu.address.commons.util.CollectionUtil;
import seedu.address.model.tag.Tag;
/**
 * Represents a Task in the address book.
 * Guarantees: details are present and not null, field values are validated.
 * Finished/event/recurring state is tracked with dedicated enums
 * (FinishProperty, EventProperty, RecurringProperty) rather than booleans.
 */
public class Task implements ReadOnlyTask {
    // Protected so subclasses can access the fields directly.
    protected Name name;
    protected TaskDate date;
    protected TaskTime time;
    protected Description description;
    protected Venue venue;
    protected Priority priority;
    protected boolean isFavorite;
    protected FinishProperty isFinished;  // FINISHED or UNFINISHED
    protected Tag tag;
    protected EventProperty isEvent;  // EVENT or NON_EVENT
    protected RecurringProperty isRecurring;  // RECURRING or NON_RECURRING
    /**
     * Creates an unfinished, non-event, non-recurring task.
     * Every field must not be null.
     */
    public Task(Name name, TaskDate date, TaskTime time, Description description, Tag tag,
            Venue venue, Priority priority, boolean isFavorite) {
        // NOTE(review): isFavorite is a primitive boolean; it is autoboxed for
        // this check and can never be null, so the check is a no-op for it.
        assert !CollectionUtil.isAnyNull(name, date, time, description, tag,
                venue, priority, isFavorite);
        this.name = name;
        this.date = date;
        this.time = time;
        this.description = description;
        this.tag = tag;
        this.venue = venue;
        this.priority = priority;
        this.isFavorite = isFavorite;
        // New tasks default to unfinished, non-event and non-recurring.
        this.isFinished = FinishProperty.UNFINISHED;
        this.isEvent = EventProperty.NON_EVENT;
        this.isRecurring = RecurringProperty.NON_RECURRING;
    }
    /**
     * Constructor that additionally takes an explicit finished state.
     * Only {@code name} is asserted non-null here, unlike the first constructor.
     */
    public Task(Name name, TaskDate date, TaskTime time, Description description, Tag tag,
            Venue venue, Priority priority, boolean isFavorite, FinishProperty isFinished) {
        assert !CollectionUtil.isAnyNull(name);
        this.name = name;
        this.date = date;
        this.time = time;
        this.description = description;
        this.tag = tag;
        this.venue = venue;
        this.priority = priority;
        this.isFavorite = isFavorite;
        this.isFinished = isFinished;
        this.isEvent = EventProperty.NON_EVENT;
        this.isRecurring = RecurringProperty.NON_RECURRING;
    }
    /**
     * Constructor of task with flag on isFinished, flag on isEvent, flag on isRecurring.
     * Only {@code name} is asserted non-null here.
     */
    public Task(Name name, TaskDate date, TaskTime time, Description description, Tag tag,
            Venue venue, Priority priority, boolean isFavorite, FinishProperty isFinished,
            EventProperty isEvent, RecurringProperty isRecurring) {
        assert !CollectionUtil.isAnyNull(name);
        this.name = name;
        this.date = date;
        this.time = time;
        this.description = description;
        this.tag = tag;
        this.venue = venue;
        this.priority = priority;
        this.isFavorite = isFavorite;
        this.isFinished = isFinished;
        this.isEvent = isEvent;
        this.isRecurring = isRecurring;
    }
    /**
     * Creates a deep copy of the given ReadOnlyTask: every field is re-parsed
     * from its string value into a fresh object.
     * @throws IllegalValueException if any re-parsed field value is invalid
     */
    public Task(ReadOnlyTask source) throws IllegalValueException {
        this(new Name(source.getName().getValue()), new TaskDate(source.getDate().getValue()),
                new TaskTime(source.getTime().getValue()), new Description(source.getDescription().getValue()),
                new Tag(source.getTag().getValue()), new Venue(source.getVenue().getValue()),
                new Priority(source.getPriority().getValue()), source.isFavorite(), source.getFinished(),
                source.getEventProperty(), source.getRecurringProperty());
    }
    public void setName(Name name) {
        assert name != null;
        this.name = name;
    }
    @Override
    public Name getName() {
        return name;
    }
    public void setDate(TaskDate date) {
        this.date = date;
    }
    @Override
    public TaskDate getDate() {
        return date;
    }
    public void setTime(TaskTime time) {
        this.time = time;
    }
    @Override
    public TaskTime getTime() {
        return time;
    }
    public void setDescription(Description description) {
        this.description = description;
    }
    @Override
    public Description getDescription() {
        return description;
    }
    @Override
    public Tag getTag() {
        return tag;
    }
    public void setTag(Tag tag) {
        assert tag != null;
        this.tag = tag;
    }
    @Override
    public Venue getVenue() {
        return venue;
    }
    public void setVenue(Venue venue) {
        this.venue = venue;
    }
    @Override
    public Priority getPriority() {
        return priority;
    }
    public void setPriority(Priority priority) {
        this.priority = priority;
    }
    @Override
    public boolean isFavorite() {
        return isFavorite;
    }
    @Override
    public String getFavoriteText() {
        // \u2764 is the heavy black heart character shown in the UI.
        if (isFavorite) {
            return "Favorite \u2764";
        } else {
            return "";
        }
    }
    public void setFavorite(boolean isFavorite) {
        this.isFavorite = isFavorite;
    }
    @Override
    public boolean isFinished() {
        return isFinished == FinishProperty.FINISHED;
    }
    @Override
    public String getFinishedText() {
        if (isFinished()) {
            return "Finished";
        } else {
            return "Unfinished";
        }
    }
    // Translates the boolean into the FinishProperty enum used internally.
    public void setFinish(Boolean isFinished) {
        if (isFinished) {
            this.isFinished = FinishProperty.FINISHED;
        } else {
            this.isFinished = FinishProperty.UNFINISHED;
        }
    }
    /**
     * Updates this task with the details of {@code replacement}.
     * Fields are re-parsed from their string values, mirroring the copy constructor.
     */
    public void resetData(ReadOnlyTask replacement) {
        assert replacement != null;
        try {
            this.setName(new Name(replacement.getName().getValue()));
            this.setDate(new TaskDate(replacement.getDate().getValue()));
            this.setTime(new TaskTime(replacement.getTime().getValue()));
            this.setDescription(new Description(replacement.getDescription().getValue()));
            this.setTag(new Tag(replacement.getTag().getValue()));
            this.setVenue(new Venue(replacement.getVenue().getValue()));
            this.setPriority(new Priority(replacement.getPriority().getValue()));
            this.setFavorite(replacement.isFavorite());
            this.setFinish(replacement.isFinished());
            this.setIsEvent(replacement.getEventProperty());
            this.setIsRecurring(replacement.getRecurringProperty());
        } catch (IllegalValueException e) {
            // NOTE(review): the exception is swallowed and only printed, which can
            // leave this task partially updated; consider propagating instead.
            e.printStackTrace();
        }
    }
    private void setIsEvent(EventProperty isEvent) {
        this.isEvent = isEvent;
    }
    private void setIsRecurring(RecurringProperty isRecurring) {
        this.isRecurring = isRecurring;
    }
    @Override
    public boolean equals(Object other) {
        return other == this // short circuit if same object
                || (other instanceof ReadOnlyTask // instanceof handles nulls
                && this.isSameStateAs((ReadOnlyTask) other));
    }
    @Override
    public int hashCode() {
        // use this method for custom fields hashing instead of implementing your own
        // NOTE(review): isEvent/isRecurring are excluded here while equals() delegates
        // to isSameStateAs — verify the two stay consistent.
        return Objects.hash(name, date, time, description, tag, venue, priority, isFavorite, isFinished);
    }
    @Override
    public String toString() {
        return getAsText();
    }
    @Override
    public boolean isEvent() {
        return this.isEvent == EventProperty.EVENT;
    }
    @Override
    public boolean isRecurring() {
        return this.isRecurring == RecurringProperty.RECURRING;
    }
    @Override
    public FinishProperty getFinished() {
        return this.isFinished;
    }
    @Override
    public EventProperty getEventProperty() {
        return this.isEvent;
    }
    public void setRecurringProperty(RecurringProperty isRecurring) {
        this.isRecurring = isRecurring;
    }
    @Override
    public RecurringProperty getRecurringProperty() {
        return this.isRecurring;
    }
}
| |
package com.fly.cj.ui.activity.Login;
import android.app.AlertDialog;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.provider.Settings;
import android.text.Spannable;
import android.text.SpannableStringBuilder;
import android.text.method.PasswordTransformationMethod;
import android.text.style.ForegroundColorSpan;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import android.widget.Toast;
import com.fly.cj.Controller;
import com.fly.cj.FireFlyApplication;
import com.fly.cj.MainFragmentActivity;
import com.fly.cj.R;
import com.fly.cj.api.obj.LoginReceive;
import com.fly.cj.base.BaseFragment;
import com.fly.cj.ui.activity.ForgotPassword.ForgotPasswordActivity;
import com.fly.cj.ui.activity.FragmentContainerActivity;
import com.fly.cj.ui.activity.Homepage.HomeActivity;
import com.fly.cj.ui.activity.UpdateProfile.UpdateProfileActivity;
import com.fly.cj.ui.activity.Register.RegisterActivity;
import com.fly.cj.ui.module.LoginModule;
import com.fly.cj.ui.object.CachedResult;
import com.fly.cj.ui.object.LoginRequest;
import com.fly.cj.ui.presenter.LoginPresenter;
import com.fly.cj.utils.RealmObjectController;
import com.fly.cj.utils.SharedPrefManager;
import com.fly.cj.utils.Utils;
import com.google.android.gms.analytics.Tracker;
import com.google.gson.Gson;
import com.mobsandgeeks.saripaar.ValidationError;
import com.mobsandgeeks.saripaar.Validator;
import com.mobsandgeeks.saripaar.annotation.NotEmpty;
import com.mobsandgeeks.saripaar.annotation.Order;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import javax.inject.Inject;
import butterknife.ButterKnife;
import butterknife.InjectView;
import cn.pedant.SweetAlert.SweetAlertDialog;
import de.keyboardsurfer.android.widget.crouton.Crouton;
import de.keyboardsurfer.android.widget.crouton.Style;
import io.realm.RealmResults;
public class LoginFragment extends BaseFragment implements LoginPresenter.LoginView,Validator.ValidationListener {
@Inject
LoginPresenter presenter;
@InjectView(R.id.registerBtn)
Button registerBtn;
@InjectView(R.id.btnLogin)
Button btnLogin;
@InjectView(R.id.txtForgotPassword)
TextView txtForgotPassword;
@NotEmpty(sequence = 1, message = "Sila isi e-mel")
@Order(1)
@InjectView(R.id.txtLoginEmail) EditText txtLoginEmail;
@NotEmpty(sequence = 1, message = "Sila isi kata laluan")
//@Length(sequence = 2, min = 6, max = 16 , message = "Must be at least 8 and maximum 16 characters")
@Order(2)
@InjectView(R.id.txtLoginPassword) EditText txtLoginPassword;
private AlertDialog dialog;
private SharedPrefManager pref;
private String storePassword, storeUsername, storeAuth_token, storeSignature, storeId;
private int fragmentContainerId;
private Validator mValidator;
private Tracker mTracker;
public static LoginFragment newInstance() {
LoginFragment fragment = new LoginFragment();
Bundle args = new Bundle();
fragment.setArguments(args);
return fragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//inject (singleton)
FireFlyApplication.get(getActivity()).createScopedGraph(new LoginModule(this)).inject(this);
RealmObjectController.clearCachedResult(getActivity());
// Validator
mValidator = new Validator(this);
mValidator.setValidationListener(this);
mValidator.setValidationMode(Validator.Mode.BURST);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.login, container, false);
ButterKnife.inject(this, view);
//Email hint
String simple = "E-mel ";
String colored = "*";
SpannableStringBuilder builder = new SpannableStringBuilder();
builder.append(simple);
int start = builder.length();
builder.append(colored);
int end = builder.length();
builder.setSpan(new ForegroundColorSpan(Color.RED), start, end,
Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
txtLoginEmail.setHint(builder);
//Password hint
String simple2 = "Kata Laluan ";
String colored2 = "*";
SpannableStringBuilder builder2 = new SpannableStringBuilder();
builder2.append(simple2);
int start2 = builder2.length();
builder2.append(colored2);
int end2 = builder2.length();
builder2.setSpan(new ForegroundColorSpan(Color.RED), start2, end2,
Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
txtLoginPassword.setHint(builder2);
pref = new SharedPrefManager(getActivity());
//set edittext password input type
txtLoginPassword.setTransformationMethod(new PasswordTransformationMethod());
btnLogin.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//AnalyticsApplication.sendEvent("Click", "btnLogin");
mValidator.validate();
Utils.hideKeyboard(getActivity(), v);
}
});
registerBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent register = new Intent(getActivity(), RegisterActivity.class);
getActivity().startActivity(register);
}
});
txtForgotPassword.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
//AnalyticsApplication.sendEvent("Click", "forget password");
forgotPassword();
}
});
return view;
}
public void forgotPassword()
{
Intent forgotPage = new Intent(getActivity(), ForgotPasswordActivity.class);
getActivity().startActivity(forgotPage);
getActivity().finish();
}
public void loginFromFragment(String username,String password){
/*Start Loading*/
initiateLoading(getActivity());
String deviceId = Settings.Secure.getString(getActivity().getContentResolver(), Settings.Secure.ANDROID_ID);
//insert value into object
LoginRequest data = new LoginRequest();
data.setUsername(username);
data.setPassword(password);
data.setDeviceId(deviceId);
//save username & password
storeUsername = username;
storePassword = password;
//start call api at presenter fail
presenter.loginFunction(data);
}
public void homepage()
{
Intent loginPage = new Intent(getActivity(), HomeActivity.class);
getActivity().startActivity(loginPage);
loginPage.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
getActivity().finish();
}
public void profile()
{
Intent profilePage = new Intent(getActivity(), UpdateProfileActivity.class);
getActivity().startActivity(profilePage);
profilePage.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
getActivity().finish();
}
@Override
public void onLoginSuccess(LoginReceive obj) {
/*Dismiss Loading*/
dismissLoading();
pref.setUserEmail(storeUsername);
pref.setUserPassword(storePassword);
pref.setUserName(obj.getUser().getUserName());
Boolean status = Controller.getRequestStatus(obj.getStatus(), "", getActivity());
if (status) {
pref.setLoginStatus("Y");
storeAuth_token = obj.getAuth_token();
storeSignature = obj.getSignature();
storeId = obj.getUser().getUserId();
Log.e("uniqid", storeId);
pref.setUserAuth(storeAuth_token);
pref.setSignatureToLocalStorage(storeSignature);
pref.setUniqId(storeId);
pref.setUserName(obj.getUser().getUserName());
String dob = obj.getUser().getUserDob();
SimpleDateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy");
try {
Date date = dateFormat.parse(dob);
System.out.println(date);
int d = getDay(date);
System.out.println(d);
Calendar calendar = Calendar.getInstance();
int year = calendar.get(Calendar.YEAR);
int real = year - d;
String age = String.valueOf(real);
pref.setUserAge(age + " tahun");
Log.e("Age", age);
} catch (ParseException e) {
e.printStackTrace();
}
//-------------------------CALL FROM PREF----------------------------------//
HashMap<String, String> initAuth = pref.getAuth();
String token = initAuth.get(SharedPrefManager.USER_AUTH);
HashMap<String, String> initSign = pref.getSignatureFromLocalStorage();
String sign = initSign.get(SharedPrefManager.SIGNATURE);
//Toast.makeText(getActivity(), sign, Toast.LENGTH_LONG).show();
Log.e(storeUsername,storePassword);
Log.e("Login Status",obj.getStatus());
Log.e("Signature ", sign);
Log.e("Token ", token);
new SweetAlertDialog(getActivity(), SweetAlertDialog.SUCCESS_TYPE)
.setTitleText("Successfully Login!")
.setConfirmClickListener(new SweetAlertDialog.OnSweetClickListener() {
@Override
public void onClick(SweetAlertDialog sDialog) {
homepage();
}
})
.show();
}
}
/*IF Login Failed*/
@Override
public void onLoginFailed(String obj) {
Crouton.makeText(getActivity(), obj, Style.ALERT).show();
setAlertDialog(getActivity(),obj,"Login Error");
}
/*
@Override
public void onRequestPasswordSuccess(ForgotPasswordReceive obj) {
dismissLoading();
Log.e("Message", obj.getMessage());
Boolean status = Controller.getRequestStatus(obj.getStatus(), obj.getMessage(), getActivity());
if (status) {
setSuccessDialog(getActivity(), obj.getMessage(),null,"Success!");
}
}
*/
/* Validation Success - Start send data to server */
@Override
public void onValidationSucceeded() {
loginFromFragment(txtLoginEmail.getText().toString(),txtLoginPassword.getText().toString());
}
/* Validation Failed - Toast Error */
@Override
public void onValidationFailed(List<ValidationError> errors) {
for (ValidationError error : errors) {
View view = error.getView();
setShake(view);
/* Split Error Message. Display first sequence only */
String message = error.getCollatedErrorMessage(getActivity());
String splitErrorMsg[] = message.split("\\r?\\n");
// Display error messages
if (view instanceof EditText) {
((EditText) view).setError(splitErrorMsg[0]);
} else {
croutonAlert(getActivity(), splitErrorMsg[0]);
}
}
}
//Popup Forgot Password
/*public void forgotPassword(){
LayoutInflater li = LayoutInflater.from(getActivity());
final View myView = li.inflate(R.layout.forgot_password, null);
Button cont = (Button)myView.findViewById(R.id.btncontinue);
final EditText editEmail = (EditText)myView.findViewById(R.id.forgot_email);
final String emailPattern = "[a-zA-Z0-9._-]+@[a-z]+\\.+[a-z]+";
cont.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if(editEmail.getText().toString().equals("")) {
Toast.makeText(getActivity(), "Email is required", Toast.LENGTH_LONG).show();
}
else if (!editEmail.getText().toString().matches(emailPattern)) {
Toast.makeText(getActivity(), "Invalid Email", Toast.LENGTH_LONG).show();
}
else{
requestForgotPassword(editEmail.getText().toString(),"");
dialog.dismiss();
}
}
});
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setView(myView);
dialog = builder.create();
WindowManager.LayoutParams lp = new WindowManager.LayoutParams();
lp.copyFrom(dialog.getWindow().getAttributes());
lp.width = WindowManager.LayoutParams.MATCH_PARENT;
//lp.height = 570;
dialog.getWindow().setAttributes(lp);
dialog.show();
}*/
/* public void requestForgotPassword(String username,String signature){
initiateLoading(getActivity());
PasswordRequest data = new PasswordRequest();
data.setEmail(username);
data.setSignature(signature);
presenter.forgotPassword(data);
}*/
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
fragmentContainerId = ((FragmentContainerActivity) getActivity()).getFragmentContainerId();
}
@Override
public void onResume() {
super.onResume();
presenter.onResume();
RealmResults<CachedResult> result = RealmObjectController.getCachedResult(MainFragmentActivity.getContext());
if(result.size() > 0){
Gson gson = new Gson();
LoginReceive obj = gson.fromJson(result.get(0).getCachedResult(), LoginReceive.class);
onLoginSuccess(obj);
}
}
@Override
public void onPause() {
super.onPause();
presenter.onPause();
}
public static int getDay(Date date) {
Calendar cal = Calendar.getInstance();
cal.setTime(date);
return cal.get(Calendar.YEAR);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.jdbc;
import io.trino.spi.connector.AggregateFunction;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ColumnMetadata;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.ConnectorSplitSource;
import io.trino.spi.connector.ConnectorTableMetadata;
import io.trino.spi.connector.JoinStatistics;
import io.trino.spi.connector.JoinType;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.SystemTable;
import io.trino.spi.connector.TableScanRedirectApplicationResult;
import io.trino.spi.predicate.TupleDomain;
import io.trino.spi.statistics.TableStatistics;
import io.trino.spi.type.Type;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.function.Supplier;
import static java.util.Objects.requireNonNull;
/**
 * A {@link JdbcClient} that forwards every operation, unchanged, to the client
 * returned by {@link #delegate()}. Subclasses (or the anonymous class created by
 * {@link #of}) only need to supply the delegate; every override below is a pure
 * pass-through with no added behavior.
 */
public abstract class ForwardingJdbcClient
        implements JdbcClient
{
    /**
     * Wraps a supplier as a forwarding client. The supplier is re-invoked on
     * every call, so the underlying client may change between invocations.
     */
    public static JdbcClient of(Supplier<JdbcClient> jdbcClientSupplier)
    {
        requireNonNull(jdbcClientSupplier, "jdbcClientSupplier is null");
        return new ForwardingJdbcClient()
        {
            @Override
            public JdbcClient delegate()
            {
                return requireNonNull(jdbcClientSupplier.get(), "jdbcClientSupplier.get() is null");
            }
        };
    }
    /** Returns the client that receives all forwarded calls; must not return null. */
    protected abstract JdbcClient delegate();
    // -- All methods below forward verbatim to delegate(). --
    @Override
    public boolean schemaExists(ConnectorSession session, String schema)
    {
        return delegate().schemaExists(session, schema);
    }
    @Override
    public Set<String> getSchemaNames(ConnectorSession session)
    {
        return delegate().getSchemaNames(session);
    }
    @Override
    public List<SchemaTableName> getTableNames(ConnectorSession session, Optional<String> schema)
    {
        return delegate().getTableNames(session, schema);
    }
    @Override
    public Optional<JdbcTableHandle> getTableHandle(ConnectorSession session, SchemaTableName schemaTableName)
    {
        return delegate().getTableHandle(session, schemaTableName);
    }
    @Override
    public List<JdbcColumnHandle> getColumns(ConnectorSession session, JdbcTableHandle tableHandle)
    {
        return delegate().getColumns(session, tableHandle);
    }
    @Override
    public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
    {
        return delegate().toColumnMapping(session, connection, typeHandle);
    }
    @Override
    public List<ColumnMapping> toColumnMappings(ConnectorSession session, List<JdbcTypeHandle> typeHandles)
    {
        return delegate().toColumnMappings(session, typeHandles);
    }
    @Override
    public WriteMapping toWriteMapping(ConnectorSession session, Type type)
    {
        return delegate().toWriteMapping(session, type);
    }
    @Override
    public boolean supportsAggregationPushdown(ConnectorSession session, JdbcTableHandle table, List<AggregateFunction> aggregates, Map<String, ColumnHandle> assignments, List<List<ColumnHandle>> groupingSets)
    {
        return delegate().supportsAggregationPushdown(session, table, aggregates, assignments, groupingSets);
    }
    @Override
    public Optional<JdbcExpression> implementAggregation(ConnectorSession session, AggregateFunction aggregate, Map<String, ColumnHandle> assignments)
    {
        return delegate().implementAggregation(session, aggregate, assignments);
    }
    @Override
    public ConnectorSplitSource getSplits(ConnectorSession session, JdbcTableHandle layoutHandle)
    {
        return delegate().getSplits(session, layoutHandle);
    }
    @Override
    public Connection getConnection(ConnectorSession session, JdbcSplit split)
            throws SQLException
    {
        return delegate().getConnection(session, split);
    }
    @Override
    public void abortReadConnection(Connection connection, ResultSet resultSet)
            throws SQLException
    {
        delegate().abortReadConnection(connection, resultSet);
    }
    @Override
    public PreparedQuery prepareQuery(
            ConnectorSession session,
            JdbcTableHandle table,
            Optional<List<List<JdbcColumnHandle>>> groupingSets,
            List<JdbcColumnHandle> columns,
            Map<String, String> columnExpressions)
    {
        return delegate().prepareQuery(session, table, groupingSets, columns, columnExpressions);
    }
    @Override
    public PreparedStatement buildSql(ConnectorSession session, Connection connection, JdbcSplit split, JdbcTableHandle tableHandle, List<JdbcColumnHandle> columnHandles)
            throws SQLException
    {
        return delegate().buildSql(session, connection, split, tableHandle, columnHandles);
    }
    @Override
    public Optional<PreparedQuery> implementJoin(
            ConnectorSession session,
            JoinType joinType,
            PreparedQuery leftSource,
            PreparedQuery rightSource,
            List<JdbcJoinCondition> joinConditions,
            Map<JdbcColumnHandle, String> rightAssignments,
            Map<JdbcColumnHandle, String> leftAssignments,
            JoinStatistics statistics)
    {
        return delegate().implementJoin(session, joinType, leftSource, rightSource, joinConditions, rightAssignments, leftAssignments, statistics);
    }
    @Override
    public JdbcOutputTableHandle beginCreateTable(ConnectorSession session, ConnectorTableMetadata tableMetadata)
    {
        return delegate().beginCreateTable(session, tableMetadata);
    }
    @Override
    public void commitCreateTable(ConnectorSession session, JdbcOutputTableHandle handle)
    {
        delegate().commitCreateTable(session, handle);
    }
    @Override
    public JdbcOutputTableHandle beginInsertTable(ConnectorSession session, JdbcTableHandle tableHandle, List<JdbcColumnHandle> columns)
    {
        return delegate().beginInsertTable(session, tableHandle, columns);
    }
    @Override
    public void finishInsertTable(ConnectorSession session, JdbcOutputTableHandle handle)
    {
        delegate().finishInsertTable(session, handle);
    }
    @Override
    public void dropTable(ConnectorSession session, JdbcTableHandle jdbcTableHandle)
    {
        delegate().dropTable(session, jdbcTableHandle);
    }
    @Override
    public void rollbackCreateTable(ConnectorSession session, JdbcOutputTableHandle handle)
    {
        delegate().rollbackCreateTable(session, handle);
    }
    @Override
    public String buildInsertSql(JdbcOutputTableHandle handle, List<WriteFunction> columnWriters)
    {
        return delegate().buildInsertSql(handle, columnWriters);
    }
    @Override
    public Connection getConnection(ConnectorSession session, JdbcOutputTableHandle handle)
            throws SQLException
    {
        return delegate().getConnection(session, handle);
    }
    @Override
    public PreparedStatement getPreparedStatement(Connection connection, String sql)
            throws SQLException
    {
        return delegate().getPreparedStatement(connection, sql);
    }
    @Override
    public TableStatistics getTableStatistics(ConnectorSession session, JdbcTableHandle handle, TupleDomain<ColumnHandle> tupleDomain)
    {
        return delegate().getTableStatistics(session, handle, tupleDomain);
    }
    @Override
    public boolean supportsTopN(ConnectorSession session, JdbcTableHandle handle, List<JdbcSortItem> sortOrder)
    {
        return delegate().supportsTopN(session, handle, sortOrder);
    }
    @Override
    public boolean isTopNGuaranteed(ConnectorSession session)
    {
        return delegate().isTopNGuaranteed(session);
    }
    @Override
    public boolean supportsLimit()
    {
        return delegate().supportsLimit();
    }
    @Override
    public boolean isLimitGuaranteed(ConnectorSession session)
    {
        return delegate().isLimitGuaranteed(session);
    }
    @Override
    public void setColumnComment(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column, Optional<String> comment)
    {
        delegate().setColumnComment(session, handle, column, comment);
    }
    @Override
    public void addColumn(ConnectorSession session, JdbcTableHandle handle, ColumnMetadata column)
    {
        delegate().addColumn(session, handle, column);
    }
    @Override
    public void dropColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle column)
    {
        delegate().dropColumn(session, handle, column);
    }
    @Override
    public void renameColumn(ConnectorSession session, JdbcTableHandle handle, JdbcColumnHandle jdbcColumn, String newColumnName)
    {
        delegate().renameColumn(session, handle, jdbcColumn, newColumnName);
    }
    @Override
    public void renameTable(ConnectorSession session, JdbcTableHandle handle, SchemaTableName newTableName)
    {
        delegate().renameTable(session, handle, newTableName);
    }
    @Override
    public void setTableProperties(ConnectorSession session, JdbcTableHandle handle, Map<String, Object> properties)
    {
        delegate().setTableProperties(session, handle, properties);
    }
    @Override
    public void createTable(ConnectorSession session, ConnectorTableMetadata tableMetadata)
    {
        delegate().createTable(session, tableMetadata);
    }
    @Override
    public void createSchema(ConnectorSession session, String schemaName)
    {
        delegate().createSchema(session, schemaName);
    }
    @Override
    public void dropSchema(ConnectorSession session, String schemaName)
    {
        delegate().dropSchema(session, schemaName);
    }
    @Override
    public Optional<SystemTable> getSystemTable(ConnectorSession session, SchemaTableName tableName)
    {
        return delegate().getSystemTable(session, tableName);
    }
    @Override
    public String quoted(String name)
    {
        return delegate().quoted(name);
    }
    @Override
    public String quoted(RemoteTableName remoteTableName)
    {
        return delegate().quoted(remoteTableName);
    }
    @Override
    public Map<String, Object> getTableProperties(ConnectorSession session, JdbcTableHandle tableHandle)
    {
        return delegate().getTableProperties(session, tableHandle);
    }
    @Override
    public Optional<TableScanRedirectApplicationResult> getTableScanRedirection(ConnectorSession session, JdbcTableHandle tableHandle)
    {
        return delegate().getTableScanRedirection(session, tableHandle);
    }
    @Override
    public OptionalLong delete(ConnectorSession session, JdbcTableHandle handle)
    {
        return delegate().delete(session, handle);
    }
    @Override
    public void truncateTable(ConnectorSession session, JdbcTableHandle handle)
    {
        delegate().truncateTable(session, handle);
    }
}
| |
package com.greenpepper.document;
import junit.framework.TestCase;
import com.greenpepper.Example;
import com.greenpepper.util.ExampleUtil;
import com.greenpepper.util.Tables;
/**
 * Tests for {@code GreenPepperTableFilter}.
 *
 * As exercised below, the boolean constructor argument selects the mode:
 * {@code true} ("lazy") skips every table that is outside a
 * BEGIN_GP_TEST/END_GP_TEST section, while {@code false} ("eager") keeps
 * ordinary tables and filters out only the tag tables themselves.
 * {@code filter(...)} returning {@code null} means the example was skipped.
 */
public class GreenPepperTableFilterTest extends TestCase
{
// Lazy mode with no GP tags at all: everything is filterable and skipped.
public void testShouldSkipSpecificationInLazyModeIfNoGreenPepperTestTags()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(true);
Example example = Tables.parse(
"[table]\n" +
"[with rows]\n"
);
assertTrue( filter.canFilter(example) );
assertNull( filter.filter(example) );
}
// Lazy mode with a BEGIN tag but no END tag: the begin tag itself is skipped.
public void testShouldSkipSpecificationInLazyModeIfNoGreenPepperEndTestTag()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(true);
Example example = Tables.parse(
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table]\n" +
"[with rows]\n"
);
assertTrue(filter.canFilter(example));
assertNull(filter.filter(example));
}
// Eager mode with no GP tags: the table is kept as-is.
public void testShouldNotSkipSpecificationInEagerModeIfNoGreenPepperTestTags()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(false);
Example example = Tables.parse(
"[table]\n" +
"[with rows]\n"
);
assertFalse(filter.canFilter(example));
assertEquals("table", ExampleUtil.contentOf(filter.filter(example).at(0, 0, 0)));
}
// NOTE(review): constructed with 'false' (eager mode) although the method
// name says "LazyMode" — the name looks wrong; confirm the intended mode.
public void testShouldNotSkipSpecificationInLazyModeIfNoGreenPepperEndTestTag()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(false);
Example example = Tables.parse(
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table]\n" +
"[with rows]\n"
);
assertTrue(filter.canFilter(example));
assertEquals("table", ExampleUtil.contentOf(filter.filter(example).at(0, 0, 0)));
}
// Lazy mode: content before BEGIN is skipped; filtering resumes at the
// first table inside the BEGIN/END section ("table1").
public void testShouldSkipSpecificationInLazyModeToEndGreenPepperTag()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(true);
Example example = Tables.parse(
"[table]\n" +
"[with rows]\n" +
"****\n" +
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table1]\n" +
"****\n" +
"[" + GreenPepperTableFilter.END_GP_TEST + "]\n"
);
assertTrue(filter.canFilter(example));
Example filtered = filter.filter(example);
assertNotNull(filtered);
assertEquals( "table1", ExampleUtil.contentOf(filtered.at(0,0,0)) );
}
// Eager mode: ordinary tables pass through untouched, but the BEGIN tag
// table itself is filterable and replaced by the table that follows it.
public void testShouldSkipGreenPepperTagInNotLazyMode()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(false);
Example example = Tables.parse(
"[table]\n" +
"[with rows]\n" +
"****\n" +
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table1]\n"
);
assertFalse(filter.canFilter(example));
Example filtered = example;
filtered = filter.filter(filtered);
assertEquals("table", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
assertTrue(filter.canFilter(filtered.nextSibling()));
filtered = filter.filter(filtered.nextSibling());
assertNotNull(filtered);
assertEquals("table1", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
}
// Lazy mode: only "table1" (inside the tags) survives; "table2" after the
// END tag is skipped (filter returns null).
// NOTE(review): "Withing" in the method name is a typo for "Within".
public void testShouldGetOnlyElementsWithingGreenPepperTags()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(true);
Example example = Tables.parse(
"[table]\n" +
"[with rows]\n" +
"****\n" +
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table1]\n" +
"****\n" +
"[" + GreenPepperTableFilter.END_GP_TEST + "]\n" +
"****\n" +
"[table2]\n" +
"****\n"
);
assertTrue(filter.canFilter(example));
Example filtered = filter.filter(example);
assertNotNull(filtered);
assertEquals("table1", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
assertTrue(filter.canFilter(filtered.nextSibling()));
filtered = filter.filter(filtered.nextSibling());
assertNull(filtered);
}
// Eager mode: all three ordinary tables are kept; BEGIN/END tag tables are
// transparently skipped over when filtering the sibling chain.
public void testShouldSkipBeginAndEndGreenPepperTags()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(false);
Example example = Tables.parse(
"[table]\n" +
"[with rows]\n" +
"****\n" +
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table1]\n" +
"****\n" +
"[" + GreenPepperTableFilter.END_GP_TEST + "]\n" +
"****\n" +
"[table2]\n" +
"****\n"
);
Example filtered = filter.filter(example);
assertNotNull(filtered);
assertEquals("table", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
filtered = filter.filter(filtered.nextSibling());
assertNotNull(filtered);
assertEquals("table1", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
filtered = filter.filter(filtered.nextSibling());
assertNotNull(filtered);
assertEquals("table2", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
}
// Lazy mode: every table between BEGIN and END is returned ("table1",
// "table2"); after the END tag the remaining content is skipped.
public void testShouldGetAllElementsAfterBeginGreenPepperTagToTheEndGreenPepperTag()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(true);
Example example = Tables.parse(
"[table]\n" +
"[with rows]\n" +
"****\n" +
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table1]\n" +
"****\n" +
"[table2]\n" +
"****\n" +
"[" + GreenPepperTableFilter.END_GP_TEST + "]\n"
);
assertTrue(filter.canFilter(example));
Example filtered = filter.filter(example);
assertNotNull(filtered);
assertEquals("table1", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
assertFalse(filter.canFilter(filtered.nextSibling()));
filtered = filter.filter(filtered.nextSibling());
assertEquals("table2", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
assertTrue(filter.canFilter(filtered.nextSibling()));
filtered = filter.filter(filtered.nextSibling());
assertNull(filtered);
}
// Nested/combined BEGIN and END tags: content inside any nesting depth of
// BEGIN...END is kept (table1, table2, table2bis, table3); content after
// the final END tag (table4) is skipped.
public void testWeCanCombineGreenPepperTags()
{
GreenPepperTableFilter filter = new GreenPepperTableFilter(true);
Example example = Tables.parse(
"[table]\n" +
"[with rows]\n" +
"****\n" +
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table1]\n" +
"****\n" +
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table2]\n" +
"****\n" +
"[" + GreenPepperTableFilter.BEGIN_GP_TEST + "]\n" +
"****\n" +
"[table2bis]\n" +
"****\n" +
"[" + GreenPepperTableFilter.END_GP_TEST + "]\n" +
"****\n" +
"[" + GreenPepperTableFilter.END_GP_TEST + "]\n" +
"****\n" +
"[" + GreenPepperTableFilter.END_GP_TEST + "]\n" +
"****\n" +
"[table3]\n" +
"****\n" +
"[" + GreenPepperTableFilter.END_GP_TEST + "]\n" +
"****\n" +
"[table4]\n"
);
assertTrue(filter.canFilter(example));
Example filtered = filter.filter(example);
assertNotNull(filtered);
assertEquals("table1", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
assertTrue(filter.canFilter(filtered.nextSibling()));
filtered = filter.filter(filtered.nextSibling());
assertEquals("table2", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
assertTrue(filter.canFilter(filtered.nextSibling()));
filtered = filter.filter(filtered.nextSibling());
assertEquals("table2bis", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
assertTrue(filter.canFilter(filtered.nextSibling()));
filtered = filter.filter(filtered.nextSibling());
assertNotNull(filtered);
assertEquals("table3", ExampleUtil.contentOf(filtered.at(0, 0, 0)));
assertTrue(filter.canFilter(filtered.nextSibling()));
filtered = filter.filter(filtered.nextSibling());
assertNull(filtered);
}
}
| |
/*-
*
* * Copyright 2015 Skymind,Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.deeplearning4j.clustering.berkeley;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.util.*;
import java.util.concurrent.ArrayBlockingQueue;
/**
 * Static utility methods for creating, combining and transforming
 * {@link Iterator}s and {@link Iterable}s. All factory methods return lazy,
 * light-weight wrappers; only the {@code fillList} overloads copy data.
 */
public class Iterators {

    /** Drains {@code it}, appending every remaining element to {@code lst}. */
    public static <T> void fillList(Iterator<? extends T> it, List<T> lst) {
        while (it.hasNext()) {
            lst.add(it.next());
        }
    }

    /** Drains {@code it} into a freshly allocated list and returns it. */
    public static <T> List<T> fillList(Iterator<? extends T> it) {
        List<T> lst = new ArrayList<>();
        fillList(it, lst);
        return lst;
    }

    /**
     * Wraps a base iterator with a transformation function.
     */
    public static abstract class Transform<S, T> implements Iterator<T> {

        private Iterator<S> base;

        public Transform(Iterator<S> base) {
            this.base = base;
        }

        public boolean hasNext() {
            return base.hasNext();
        }

        public T next() {
            return transform(base.next());
        }

        /** Converts one element of the base iterator into the output type. */
        protected abstract T transform(S next);

        public void remove() {
            base.remove();
        }
    }

    /** Utility class; not instantiable. */
    private Iterators() {}

    /**
     * Wraps an iterator as an iterable.
     *
     * @param <T>
     * @param it
     * @return
     */
    public static <T> Iterable<T> newIterable(final Iterator<T> it) {
        return new Iterable<T>() {
            public Iterator<T> iterator() {
                return it;
            }
        };
    }

    /**
     * Wraps an iterator as a single-use iterable. Because the backing
     * iterator is consumed by iteration, a second call to {@code iterator()}
     * fails fast instead of silently returning an exhausted iterator.
     *
     * @param <T>
     * @param it
     * @return
     */
    public static <T> Iterable<T> able(final Iterator<T> it) {
        return new Iterable<T>() {
            boolean used = false;

            public Iterator<T> iterator() {
                if (used)
                    throw new RuntimeException("One use iterable");
                used = true;
                return it;
            }
        };
    }

    /**
     * Executes calls to next() in a different thread: a single background
     * thread (started lazily on the first call to next()) pulls elements from
     * {@code base} into a small bounded buffer that the returned iterator
     * consumes from.
     *
     * NOTE(review): hasNext() may still return true while the loader thread is
     * about to finish with an empty buffer, in which case next() blocks
     * forever; confirm callers tolerate this before relying on hasNext().
     *
     * @param <T>
     * @param base
     * @return
     */
    public static <T> Iterator<T> thread(final Iterator<T> base) {
        return new Iterator<T>() {
            ArrayBlockingQueue<T> els = new ArrayBlockingQueue<>(2);
            // volatile: written by the loader thread, read by the consumer thread.
            private volatile boolean finishedLoading = false;
            // Only touched by the consumer thread; guards a single start().
            private boolean running = false;
            Thread thread = new Thread(new Runnable() {
                public void run() {
                    while (base.hasNext()) {
                        try {
                            els.put(base.next());
                        } catch (InterruptedException e) {
                            throw new RuntimeException(e);
                        }
                    }
                    finishedLoading = true;
                }
            });

            public boolean hasNext() {
                return !(finishedLoading && els.isEmpty());
            }

            public T next() {
                if (!running)
                    thread.start();
                running = true;
                try {
                    return els.take();
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
            }

            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    /** Pairs up two iterators; iteration stops as soon as either side runs out. */
    public static <S, T> Iterator<Pair<S, T>> zip(final Iterator<S> s, final Iterator<T> t) {
        return new Iterator<Pair<S, T>>() {
            public boolean hasNext() {
                return s.hasNext() && t.hasNext();
            }

            public Pair<S, T> next() {
                return Pair.newPair(s.next(), t.next());
            }

            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    /**
     * Provides a max number of elements for an underlying base iterator.
     */
    public static <T> Iterator<T> maxLengthIterator(final Iterator<T> base, final int max) {
        return new Iterator<T>() {
            int count = 0;

            public boolean hasNext() {
                return base.hasNext() && count < max;
            }

            public T next() {
                if (!hasNext())
                    throw new NoSuchElementException("No more elements");
                count++;
                return base.next();
            }

            public void remove() {
                throw new UnsupportedOperationException();
                // TODO Maybe this should behave in a more friendly manner
            }
        };
    }

    /**
     * Wraps a two-level iteration scenario in an iterator. Each key of the keys
     * iterator returns an iterator (via the factory) over T's.
     *
     * The IteratorIterator loops through the iterator associated with each key
     * until all the keys are used up.
     */
    public static class IteratorIterator<T> implements Iterator<T> {
        Iterator<T> current = null;
        Iterator keys;
        Factory<Iterator<T>> iterFactory;

        public IteratorIterator(Iterator keys, Factory<Iterator<T>> iterFactory) {
            this.keys = keys;
            this.iterFactory = iterFactory;
            current = getNextIterator();
        }

        /** Advances to the next key whose iterator has at least one element. */
        private Iterator<T> getNextIterator() {
            Iterator<T> next = null;
            while (next == null) {
                if (!keys.hasNext())
                    break;
                next = iterFactory.newInstance(keys.next());
                if (!next.hasNext())
                    next = null;
            }
            return next;
        }

        public boolean hasNext() {
            return current != null;
        }

        public T next() {
            // Fail per the Iterator contract instead of with a raw NullPointerException.
            if (current == null)
                throw new NoSuchElementException();
            T next = current.next();
            if (!current.hasNext())
                current = getNextIterator();
            return next;
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Creates an iterator that only returns items of a base iterator that pass
     * a filter.
     *
     * Null items cannot be returned from the base iterator.
     */
    public static class FilteredIterator<T> implements Iterator<T> {
        Filter<T> filter;
        T next;
        private Iterator<T> base;

        public FilteredIterator(Filter<T> filter, Iterator<T> base) {
            super();
            this.filter = filter;
            this.base = base;
            loadNext();
        }

        public FilteredIterator(Filter<T> filter, Iterable<T> items) {
            this(filter, items.iterator());
        }

        /** Pre-loads the next accepted element, or leaves null if none remain. */
        private void loadNext() {
            next = null;
            while (next == null && base.hasNext()) {
                next = base.next();
                if (!filter.accept(next))
                    next = null;
            }
        }

        public boolean hasNext() {
            return next != null;
        }

        public T next() {
            // Throw per the Iterator contract rather than silently returning null.
            if (next == null)
                throw new NoSuchElementException();
            T old = next;
            loadNext();
            return old;
        }

        public void remove() {
            throw new UnsupportedOperationException();
        }
    }

    /** Lazily applies {@code transformer} to every element of the input iterator. */
    public static class TransformingIterator<I, O> implements Iterator<O> {
        private MyMethod<I, O> transformer;
        private Iterator<I> inputIterator;

        public TransformingIterator(Iterator<I> inputIterator, MyMethod<I, O> transformer) {
            this.inputIterator = inputIterator;
            this.transformer = transformer;
        }

        public boolean hasNext() {
            return inputIterator.hasNext();
        }

        public O next() {
            return transformer.call(inputIterator.next());
        }

        public void remove() {
            inputIterator.remove();
        }
    }

    /** Returns a lazy view of {@code iterator} containing only accepted elements. */
    public static <T> Iterator<T> filter(Iterator<T> iterator, Filter<T> filter) {
        return new FilteredIterator<>(filter, iterator);
    }

    /** Lazily concatenates the iterators supplied by {@code args} into one iterator. */
    public static <T> Iterator<T> concat(Iterable<Iterator<? extends T>> args) {
        Factory<Iterator<T>> factory = new Factory<Iterator<T>>() {
            public Iterator<T> newInstance(Object... args) {
                return (Iterator<T>) args[0];
            }
        };
        // Iterate the supplied Iterable directly. Wrapping it in
        // Arrays.asList(args) would produce a single-element list containing
        // the Iterable itself, and the factory's cast to Iterator would then
        // fail with a ClassCastException.
        return new IteratorIterator<>(args.iterator(), factory);
    }

    /** Lazily concatenates the given iterators into one iterator. */
    @SafeVarargs
    public static <T> Iterator<T> concat(Iterator<? extends T>... args) {
        Factory<Iterator<T>> factory = new Factory<Iterator<T>>() {
            public Iterator<T> newInstance(Object... args) {
                return (Iterator<T>) args[0];
            }
        };
        return new IteratorIterator<>(Arrays.asList(args).iterator(), factory);
    }

    /** Returns an iterator that yields {@code item} exactly once. */
    public static <U> Iterator<U> oneItemIterator(final U item) {
        return new Iterator<U>() {
            boolean unused = true;

            public boolean hasNext() {
                return unused;
            }

            public U next() {
                // Throw per the Iterator contract once the single item is consumed.
                if (!unused)
                    throw new NoSuchElementException();
                unused = false;
                return item;
            }

            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    /** Returns an iterator over nothing. */
    public static Iterator emptyIterator() {
        return new Iterator() {
            public boolean hasNext() {
                return false;
            }

            public Object next() {
                throw new NoSuchElementException();
            }

            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    /** Lazily concatenates two iterables into one single-use iterable. */
    public static <T> Iterable<T> concat(Iterable<T> a, Iterable<T> b) {
        return able(concat(a.iterator(), b.iterator()));
    }

    /** Advances every iterator once and collects the produced elements in order. */
    public static <T> List<T> nextList(List<Iterator<T>> iterators) {
        List<T> items = new ArrayList<>(iterators.size());
        for (Iterator<T> iter : iterators) {
            items.add(iter.next());
        }
        return items;
    }

    /**
     * Iterates over the objects read from {@code instream}. The first
     * IOException or ClassNotFoundException is treated as end-of-stream, so a
     * null object in the stream also terminates iteration.
     */
    public static Iterator<Object> objectIterator(final ObjectInputStream instream) {
        return new Iterator<Object>() {
            Object next = softRead();

            public boolean hasNext() {
                return next != null;
            }

            /** Returns the next object, or null on any read error (treated as EOF). */
            private Object softRead() {
                try {
                    return instream.readObject();
                } catch (IOException e) {
                    return null;
                } catch (ClassNotFoundException e) {
                    return null;
                }
            }

            public Object next() {
                Object curr = next;
                next = softRead();
                return curr;
            }

            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import com.facebook.buck.cxx.CxxLinkableEnhancer;
import com.facebook.buck.cxx.CxxPlatformUtils;
import com.facebook.buck.cxx.CxxPreprocessAndCompile;
import com.facebook.buck.cxx.CxxSource;
import com.facebook.buck.cxx.CxxSourceRuleFactory;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.cxx.NativeLinkableInput;
import com.facebook.buck.io.AlwaysFoundExecutableFinder;
import com.facebook.buck.io.MoreFiles;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.model.Pair;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.keys.DefaultRuleKeyFactory;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
public class NdkCxxPlatformTest {

  @Rule
  public TemporaryPaths tmp = new TemporaryPaths();

  /** Which stage of the C++ build pipeline to construct rule keys for. */
  enum Operation {
    COMPILE,
    PREPROCESS_AND_COMPILE,
  }

  /**
   * Creates and returns rule keys from a dummy source ("source.cpp") for each
   * of the given platforms, using the requested compile operation.
   */
  private ImmutableMap<NdkCxxPlatforms.TargetCpuType, RuleKey> constructCompileRuleKeys(
      Operation operation,
      ImmutableMap<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> cxxPlatforms) {
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = new SourcePathResolver(ruleFinder);
    String source = "source.cpp";
    // Fixed fake hash so rule-key differences can only come from the platform.
    DefaultRuleKeyFactory ruleKeyFactory =
        new DefaultRuleKeyFactory(
            0,
            FakeFileHashCache.createFromStrings(
                ImmutableMap.<String, String>builder()
                    .put("source.cpp", Strings.repeat("a", 40))
                    .build()),
            pathResolver,
            ruleFinder);
    BuildTarget target = BuildTargetFactory.newInstance("//:target");
    ImmutableMap.Builder<NdkCxxPlatforms.TargetCpuType, RuleKey> ruleKeys =
        ImmutableMap.builder();
    for (Map.Entry<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> entry : cxxPlatforms.entrySet()) {
      CxxSourceRuleFactory cxxSourceRuleFactory = CxxSourceRuleFactory.builder()
          .setParams(new FakeBuildRuleParamsBuilder(target).build())
          .setResolver(resolver)
          .setPathResolver(pathResolver)
          .setRuleFinder(ruleFinder)
          .setCxxBuckConfig(CxxPlatformUtils.DEFAULT_CONFIG)
          .setCxxPlatform(entry.getValue().getCxxPlatform())
          .setPicType(CxxSourceRuleFactory.PicType.PIC)
          .build();
      CxxPreprocessAndCompile rule;
      switch (operation) {
        case PREPROCESS_AND_COMPILE:
          rule =
              cxxSourceRuleFactory.createPreprocessAndCompileBuildRule(
                  source,
                  CxxSource.of(
                      CxxSource.Type.CXX,
                      new FakeSourcePath(source),
                      ImmutableList.of()));
          break;
        case COMPILE:
          rule =
              cxxSourceRuleFactory.createCompileBuildRule(
                  source,
                  CxxSource.of(
                      CxxSource.Type.CXX_CPP_OUTPUT,
                      new FakeSourcePath(source),
                      ImmutableList.of()));
          break;
        default:
          throw new IllegalStateException();
      }
      ruleKeys.put(entry.getKey(), ruleKeyFactory.build(rule));
    }
    return ruleKeys.build();
  }

  /**
   * Creates and returns link rule keys from a dummy object file ("input.o")
   * for each of the given platforms.
   */
  private ImmutableMap<NdkCxxPlatforms.TargetCpuType, RuleKey> constructLinkRuleKeys(
      ImmutableMap<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> cxxPlatforms)
      throws NoSuchBuildTargetException {
    BuildRuleResolver resolver =
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(resolver);
    SourcePathResolver pathResolver = new SourcePathResolver(ruleFinder);
    // Fixed fake hash so rule-key differences can only come from the platform.
    DefaultRuleKeyFactory ruleKeyFactory =
        new DefaultRuleKeyFactory(
            0,
            FakeFileHashCache.createFromStrings(
                ImmutableMap.<String, String>builder()
                    .put("input.o", Strings.repeat("a", 40))
                    .build()),
            pathResolver,
            ruleFinder);
    BuildTarget target = BuildTargetFactory.newInstance("//:target");
    ImmutableMap.Builder<NdkCxxPlatforms.TargetCpuType, RuleKey> ruleKeys =
        ImmutableMap.builder();
    for (Map.Entry<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> entry : cxxPlatforms.entrySet()) {
      BuildRule rule = CxxLinkableEnhancer.createCxxLinkableBuildRule(
          CxxPlatformUtils.DEFAULT_CONFIG,
          entry.getValue().getCxxPlatform(),
          new FakeBuildRuleParamsBuilder(target).build(),
          resolver,
          pathResolver,
          ruleFinder,
          target,
          Linker.LinkType.EXECUTABLE,
          Optional.empty(),
          Paths.get("output"),
          Linker.LinkableDepType.SHARED,
          /* thinLto */ false,
          ImmutableList.of(),
          Optional.empty(),
          Optional.empty(),
          ImmutableSet.of(),
          NativeLinkableInput.builder()
              .setArgs(SourcePathArg.from(new FakeSourcePath("input.o")))
              .build());
      ruleKeys.put(entry.getKey(), ruleKeyFactory.build(rule));
    }
    return ruleKeys.build();
  }

  /** Covers both legacy ("rN[x]") and dotted ("N.x.y") NDK version strings. */
  @Test
  public void testNdkMajorVersion() {
    assertEquals(9, NdkCxxPlatforms.getNdkMajorVersion("r9"));
    assertEquals(9, NdkCxxPlatforms.getNdkMajorVersion("r9b"));
    assertEquals(10, NdkCxxPlatforms.getNdkMajorVersion("r10c"));
    assertEquals(10, NdkCxxPlatforms.getNdkMajorVersion("r10e"));
    assertEquals(11, NdkCxxPlatforms.getNdkMajorVersion("11.0.1234"));
    assertEquals(11, NdkCxxPlatforms.getNdkMajorVersion("11.2.2725575"));
    assertEquals(12, NdkCxxPlatforms.getNdkMajorVersion("12.0.1234"));
    assertEquals(12, NdkCxxPlatforms.getNdkMajorVersion("12.1.2977051"));
  }

  // The important aspects we check for in rule keys is that the host platform and the path
  // to the NDK don't cause changes.
  @Test
  public void checkRootAndPlatformDoNotAffectRuleKeys() throws Exception {
    ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRoot());

    // Test all major compiler and runtime combinations.
    ImmutableList<Pair<NdkCxxPlatformCompiler.Type, NdkCxxPlatforms.CxxRuntime>> configs =
        ImmutableList.of(
            new Pair<>(NdkCxxPlatformCompiler.Type.GCC, NdkCxxPlatforms.CxxRuntime.GNUSTL),
            new Pair<>(NdkCxxPlatformCompiler.Type.CLANG, NdkCxxPlatforms.CxxRuntime.GNUSTL),
            new Pair<>(NdkCxxPlatformCompiler.Type.CLANG, NdkCxxPlatforms.CxxRuntime.LIBCXX));
    for (Pair<NdkCxxPlatformCompiler.Type, NdkCxxPlatforms.CxxRuntime> config : configs) {
      Map<String, ImmutableMap<NdkCxxPlatforms.TargetCpuType, RuleKey>>
          preprocessAndCompileRuleKeys = new HashMap<>();
      Map<String, ImmutableMap<NdkCxxPlatforms.TargetCpuType, RuleKey>>
          compileRuleKeys = new HashMap<>();
      Map<String, ImmutableMap<NdkCxxPlatforms.TargetCpuType, RuleKey>>
          linkRuleKeys = new HashMap<>();

      // Iterate building up rule keys for combinations of different platforms and NDK root
      // directories.
      for (String dir : ImmutableList.of("android-ndk-r9c", "android-ndk-r10b")) {
        for (Platform platform :
            ImmutableList.of(Platform.LINUX, Platform.MACOS, Platform.WINDOWS)) {
          Path root = tmp.newFolder(dir);
          MoreFiles.writeLinesToFile(ImmutableList.of("r9c"), root.resolve("RELEASE.TXT"));
          // NOTE(review): the dummy version strings below look swapped
          // ("clang-version" passed to setGccVersion) — the values are opaque
          // to the rule-key computation, but confirm the intent.
          ImmutableMap<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> platforms =
              NdkCxxPlatforms.getPlatforms(
                  CxxPlatformUtils.DEFAULT_CONFIG,
                  filesystem,
                  root,
                  NdkCxxPlatformCompiler.builder()
                      .setType(config.getFirst())
                      .setVersion("gcc-version")
                      .setGccVersion("clang-version")
                      .build(),
                  // Use the runtime from this (compiler, runtime) combination;
                  // hard-coding GNUSTL here would leave the LIBCXX config
                  // untested.
                  config.getSecond(),
                  "target-app-platform",
                  ImmutableSet.of("x86"),
                  platform,
                  new AlwaysFoundExecutableFinder(),
                  /* strictToolchainPaths */ false);
          preprocessAndCompileRuleKeys.put(
              String.format("NdkCxxPlatform(%s, %s)", dir, platform),
              constructCompileRuleKeys(Operation.PREPROCESS_AND_COMPILE, platforms));
          compileRuleKeys.put(
              String.format("NdkCxxPlatform(%s, %s)", dir, platform),
              constructCompileRuleKeys(Operation.COMPILE, platforms));
          linkRuleKeys.put(
              String.format("NdkCxxPlatform(%s, %s)", dir, platform),
              constructLinkRuleKeys(platforms));
          MoreFiles.deleteRecursively(root);
        }
      }

      // If everything worked, we should be able to collapse all the generated rule keys down
      // to a singleton set.
      assertThat(
          Arrays.toString(preprocessAndCompileRuleKeys.entrySet().toArray()),
          Sets.newHashSet(preprocessAndCompileRuleKeys.values()),
          Matchers.hasSize(1));
      assertThat(
          Arrays.toString(compileRuleKeys.entrySet().toArray()),
          Sets.newHashSet(compileRuleKeys.values()),
          Matchers.hasSize(1));
      assertThat(
          Arrays.toString(linkRuleKeys.entrySet().toArray()),
          Sets.newHashSet(linkRuleKeys.values()),
          Matchers.hasSize(1));
    }
  }

  /** The NDK root itself must be whitelisted for header verification. */
  @Test
  public void headerVerificationWhitelistsNdkRoot() throws IOException {
    ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRoot());
    String dir = "android-ndk-r9c";
    Path root = tmp.newFolder(dir);
    MoreFiles.writeLinesToFile(ImmutableList.of("r9c"), root.resolve("RELEASE.TXT"));
    ImmutableMap<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> platforms =
        NdkCxxPlatforms.getPlatforms(
            CxxPlatformUtils.DEFAULT_CONFIG,
            filesystem,
            root,
            NdkCxxPlatformCompiler.builder()
                .setType(NdkCxxPlatformCompiler.Type.GCC)
                .setVersion("gcc-version")
                .setGccVersion("clang-version")
                .build(),
            NdkCxxPlatforms.CxxRuntime.GNUSTL,
            "target-app-platform",
            ImmutableSet.of("x86"),
            Platform.LINUX,
            new AlwaysFoundExecutableFinder(),
            /* strictToolchainPaths */ false);
    for (NdkCxxPlatform ndkCxxPlatform : platforms.values()) {
      assertTrue(
          ndkCxxPlatform.getCxxPlatform().getHeaderVerification()
              .isWhitelisted(root.resolve("test.h").toString()));
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.datatorrent.lib.io.fs;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.TreeSet;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.apache.apex.malhar.lib.fs.LineByLineFileInputOperator;
import org.apache.apex.malhar.lib.wal.FSWindowDataManager;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.datatorrent.api.Attribute;
import com.datatorrent.api.Context;
import com.datatorrent.api.DefaultPartition;
import com.datatorrent.api.Partitioner.Partition;
import com.datatorrent.api.StatsListener;
import com.datatorrent.lib.helper.OperatorContextTestHelper;
import com.datatorrent.lib.io.fs.AbstractFileInputOperator.DirectoryScanner;
import com.datatorrent.lib.partitioner.StatelessPartitionerTest.PartitioningContextImpl;
import com.datatorrent.lib.testbench.CollectorTestSink;
import com.datatorrent.lib.util.TestUtils;
public class AbstractFileInputOperatorTest
{
/**
 * Per-test fixture: derives a unique target/<class>/<method> working
 * directory for each test, wires it into the operator context as the
 * application path, and cleans the directory up before and after the test.
 */
public static class TestMeta extends TestWatcher
{
// Working directory for the current test; set in starting().
public String dir = null;
Context.OperatorContext context;
@Override
protected void starting(org.junit.runner.Description description)
{
// Remove leftovers from a previous run before creating fresh state.
TestUtils.deleteTargetTestClassFolder(description);
String methodName = description.getMethodName();
String className = description.getClassName();
this.dir = "target/" + className + "/" + methodName;
Attribute.AttributeMap attributes = new Attribute.AttributeMap.DefaultAttributeMap();
attributes.put(Context.DAGContext.APPLICATION_PATH, dir);
context = new OperatorContextTestHelper.TestIdOperatorContext(1, attributes);
}
@Override
protected void finished(Description description)
{
TestUtils.deleteTargetTestClassFolder(description);
}
}
@Rule
public TestMeta testMeta = new TestMeta();
// Recursive scan: files in nested sub-directories must be picked up.
// NOTE(review): "Partiton" in the method name is a typo for "Partition".
@Test
public void testSinglePartitonRecursive() throws Exception
{
checkSubDir(true);
}
// Non-recursive scan: files in nested sub-directories must be ignored.
// NOTE(review): "Partiton" in the method name is a typo for "Partition".
@Test
public void testSinglePartiton() throws Exception
{
checkSubDir(false);
}
/**
 * Writes two files (two lines each) into nested sub-directories
 * (depth_0/ and depth_0/depth_1/), runs a LineByLineFileInputOperator with
 * the given recursive setting for three windows, and asserts that all four
 * lines are emitted when recursive, and none when not (all files live in
 * sub-directories, so a flat scan finds nothing).
 */
private void checkSubDir(boolean recursive) throws Exception
{
// Start from a clean test directory.
FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
HashSet<String> allLines = Sets.newHashSet();
String subdir = "";
// Each iteration descends one level deeper and writes one two-line file.
for (int file = 0; file < 2; file++) {
subdir += String.format("/depth_%d", file);
HashSet<String> lines = Sets.newHashSet();
for (int line = 0; line < 2; line++) {
lines.add("f" + file + "l" + line);
}
allLines.addAll(lines);
FileUtils.write(new File(testMeta.dir + subdir, "file" + file), StringUtils.join(lines, '\n'));
}
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
@SuppressWarnings({"unchecked", "rawtypes"})
CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
oper.output.setSink(sink);
oper.setDirectory(testMeta.dir);
// Match only the generated data files, excluding anything under "target".
oper.getScanner().setFilePatternRegexp("((?!target).)*file[\\d]");
oper.getScanner().setRecursive(recursive);
oper.setup(testMeta.context);
// Three windows is enough for the scanner to discover and read both files.
for (long wid = 0; wid < 3; wid++) {
oper.beginWindow(wid);
oper.emitTuples();
oper.endWindow();
}
oper.teardown();
int expectedNumTuples = 4;
if (!recursive) {
// Non-recursive: both files sit in sub-directories, so nothing is read.
allLines = new HashSet<String>();
expectedNumTuples = 0;
}
Assert.assertEquals("number tuples", expectedNumTuples, queryResults.collectedTuples.size());
Assert.assertEquals("lines", allLines, new HashSet<String>(queryResults.collectedTuples));
}
// Splitting a DirectoryScanner into two partitions must distribute the four
// matching files evenly (2 each) with no file lost or duplicated.
@Test
public void testScannerPartitioning() throws Exception
{
DirectoryScanner scanner = new DirectoryScanner();
scanner.setFilePatternRegexp(".*partition([\\d]*)");
Path path = new Path(new File(testMeta.dir).getAbsolutePath());
FileContext.getLocalFSFileContext().delete(path, true);
// Create four empty files matching the partition pattern.
for (int file = 0; file < 4; file++) {
FileUtils.write(new File(testMeta.dir, "partition00" + file), "");
}
FileSystem fs = FileSystem.get(FileContext.getLocalFSFileContext().getDefaultFileSystem().getUri(), new Configuration());
List<DirectoryScanner> partitions = scanner.partition(2);
Set<Path> allFiles = Sets.newHashSet();
for (DirectoryScanner partition : partitions) {
Set<Path> files = partition.scan(fs, path, Sets.<String>newHashSet());
Assert.assertEquals("", 2, files.size());
allFiles.addAll(files);
}
// The union of both partitions must cover all four files exactly.
Assert.assertEquals("Found all files " + allFiles, 4, allFiles.size());
}
// definePartitions() on the operator must produce two fresh operator
// instances with independent scanners, each seeing half of the four files.
@Test
public void testPartitioning() throws Exception
{
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
oper.getScanner().setFilePatternRegexp(".*partition([\\d]*)");
oper.setDirectory(new File(testMeta.dir).getAbsolutePath());
Path path = new Path(new File(testMeta.dir).getAbsolutePath());
FileContext.getLocalFSFileContext().delete(path, true);
// Create four empty files matching the partition pattern.
for (int file = 0; file < 4; file++) {
FileUtils.write(new File(testMeta.dir, "partition00" + file), "");
}
List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = oper.definePartitions(partitions,
new PartitioningContextImpl(null, 2));
Assert.assertEquals(2, newPartitions.size());
Assert.assertEquals(1, oper.getCurrentPartitions()); // partitioned() wasn't called
for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
// Each partition must be a distinct operator with its own scanner.
Assert.assertNotSame(oper, p.getPartitionedInstance());
Assert.assertNotSame(oper.getScanner(), p.getPartitionedInstance().getScanner());
Set<String> consumed = Sets.newHashSet();
LinkedHashSet<Path> files = p.getPartitionedInstance().getScanner().scan(FileSystem.getLocal(new Configuration(false)), path, consumed);
Assert.assertEquals("partition " + files, 2, files.size());
}
}
/**
 * Test for testing dynamic partitioning.
 * - Create 4 file with 3 records each.
 * - Create a single partition, and read all records, populating pending files in operator.
 * - Split it in two operators
 * - Try to emit records again, expected result is no record is emitted, as all files are
 * processed.
 * - Create another 4 files with 3 records each
 * - Try to emit records again, expected result total record emitted 4 * 3 = 12.
 */
@Test
public void testPartitioningStateTransfer() throws Exception
{
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
oper.getScanner().setFilePatternRegexp(".*partition([\\d]*)");
oper.setDirectory(new File(testMeta.dir).getAbsolutePath());
oper.setScanIntervalMillis(0);
// Pristine pre-setup copy of the operator; repartitioning below is driven from
// this copy while 'oper' accumulates runtime (processed-files) state.
LineByLineFileInputOperator initialState = new Kryo().copy(oper);
// Create 4 files with 3 records each.
Path path = new Path(new File(testMeta.dir).getAbsolutePath());
FileContext.getLocalFSFileContext().delete(path, true);
int file;
for (file = 0; file < 4; file++) {
FileUtils.write(new File(testMeta.dir, "partition00" + file), "a\nb\nc\n");
}
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
@SuppressWarnings({"unchecked", "rawtypes"})
CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
oper.output.setSink(sink);
int wid = 0;
// Read all records to populate processedList in operator.
oper.setup(testMeta.context);
for (int i = 0; i < 10; i++) {
oper.beginWindow(wid);
oper.emitTuples();
oper.endWindow();
wid++;
}
// 4 files * 3 lines each.
Assert.assertEquals("All tuples read ", 12, sink.collectedTuples.size());
Assert.assertEquals(1, initialState.getCurrentPartitions());
initialState.setPartitionCount(2);
StatsListener.Response rsp = initialState.processStats(null);
Assert.assertEquals(true, rsp.repartitionRequired);
// Create partitions of the operator.
List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
// incremental capacity controlled partitionCount property
Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = initialState.definePartitions(partitions,
new PartitioningContextImpl(null, 0));
Assert.assertEquals(2, newPartitions.size());
Assert.assertEquals(1, initialState.getCurrentPartitions());
Map<Integer, Partition<AbstractFileInputOperator<String>>> m = Maps.newHashMap();
for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
m.put(m.size(), p);
}
// Notify the operator that the new partitions are deployed; the current
// partition count must update from 1 to 2.
initialState.partitioned(m);
Assert.assertEquals(2, initialState.getCurrentPartitions());
/* Collect all operators in a list */
List<AbstractFileInputOperator<String>> opers = Lists.newArrayList();
for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
LineByLineFileInputOperator oi = (LineByLineFileInputOperator)p.getPartitionedInstance();
oi.setup(testMeta.context);
oi.output.setSink(sink);
opers.add(oi);
}
sink.clear();
for (int i = 0; i < 10; i++) {
for (AbstractFileInputOperator<String> o : opers) {
o.beginWindow(wid);
o.emitTuples();
o.endWindow();
}
wid++;
}
// No record should be read.
Assert.assertEquals("No new tuples read ", 0, sink.collectedTuples.size());
// Add four new files with 3 records each.
for (; file < 8; file++) {
FileUtils.write(new File(testMeta.dir, "partition00" + file), "a\nb\nc\n");
}
for (int i = 0; i < 10; i++) {
for (AbstractFileInputOperator<String> o : opers) {
o.beginWindow(wid);
o.emitTuples();
o.endWindow();
}
wid++;
}
// If all files are processed only once then number of records emitted should
// be 12.
Assert.assertEquals("All tuples read ", 12, sink.collectedTuples.size());
}
/**
 * Test for testing dynamic partitioning.
 * - Create 4 file with 3 records each.
 * - Create a single partition, and read some records, populating pending files in operator.
 * - Split it in two operators
 * - Try to emit the remaining records.
 */
@Test
public void testPartitioningStateTransferInterrupted() throws Exception
{
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
oper.getScanner().setFilePatternRegexp(".*partition([\\d]*)");
oper.setDirectory(new File(testMeta.dir).getAbsolutePath());
oper.setScanIntervalMillis(0);
// Small batch size so the 12 available lines are only partially consumed below.
oper.setEmitBatchSize(2);
// Pristine pre-setup copy used to drive the repartitioning.
LineByLineFileInputOperator initialState = new Kryo().copy(oper);
// Create 4 files with 3 records each.
Path path = new Path(new File(testMeta.dir).getAbsolutePath());
FileContext.getLocalFSFileContext().delete(path, true);
int file;
for (file = 0; file < 4; file++) {
FileUtils.write(new File(testMeta.dir, "partition00" + file), "a\nb\nc\n");
}
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
@SuppressWarnings({"unchecked", "rawtypes"})
CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
oper.output.setSink(sink);
int wid = 0;
//Read some records
oper.setup(testMeta.context);
for (int i = 0; i < 5; i++) {
oper.beginWindow(wid);
oper.emitTuples();
oper.endWindow();
wid++;
}
// Only some of the 12 lines have been read at this point.
Assert.assertEquals("Partial tuples read ", 6, sink.collectedTuples.size());
Assert.assertEquals(1, initialState.getCurrentPartitions());
initialState.setPartitionCount(2);
StatsListener.Response rsp = initialState.processStats(null);
Assert.assertEquals(true, rsp.repartitionRequired);
// Create partitions of the operator.
List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
// incremental capacity controlled partitionCount property
Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = initialState.definePartitions(partitions, new PartitioningContextImpl(null, 0));
Assert.assertEquals(2, newPartitions.size());
Assert.assertEquals(1, initialState.getCurrentPartitions());
Map<Integer, Partition<AbstractFileInputOperator<String>>> m = Maps.newHashMap();
for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
m.put(m.size(), p);
}
initialState.partitioned(m);
Assert.assertEquals(2, initialState.getCurrentPartitions());
/* Collect all operators in a list */
List<AbstractFileInputOperator<String>> opers = Lists.newArrayList();
for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
LineByLineFileInputOperator oi = (LineByLineFileInputOperator)p.getPartitionedInstance();
oi.setup(testMeta.context);
oi.output.setSink(sink);
opers.add(oi);
}
sink.clear();
for (int i = 0; i < 10; i++) {
for (AbstractFileInputOperator<String> o : opers) {
o.beginWindow(wid);
o.emitTuples();
o.endWindow();
}
wid++;
}
// The two partitions together must emit exactly the remaining 12 - 6 = 6 lines.
Assert.assertEquals("Remaining tuples read ", 6, sink.collectedTuples.size());
}
/**
 * Test for testing dynamic partitioning interrupting ongoing read.
 * - Create 4 file with 3 records each.
 * - Create a single partition, and read some records, populating pending files in operator.
 * - Split it in two operators
 * - Try to emit the remaining records.
 */
@Test
public void testPartitioningStateTransferFailure() throws Exception
{
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
oper.getScanner().setFilePatternRegexp(".*partition([\\d]*)");
oper.setDirectory(new File(testMeta.dir).getAbsolutePath());
oper.setScanIntervalMillis(0);
// Small batch size so a file is still mid-read when the repartition happens.
oper.setEmitBatchSize(2);
// Pristine pre-setup copy used to drive the repartitioning.
LineByLineFileInputOperator initialState = new Kryo().copy(oper);
// Create 4 files with 3 records each.
Path path = new Path(new File(testMeta.dir).getAbsolutePath());
FileContext.getLocalFSFileContext().delete(path, true);
int file;
for (file = 0; file < 4; file++) {
FileUtils.write(new File(testMeta.dir, "partition00" + file), "a\nb\nc\n");
}
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
@SuppressWarnings({"unchecked", "rawtypes"})
CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
oper.output.setSink(sink);
int wid = 0;
//Read some records
oper.setup(testMeta.context);
for (int i = 0; i < 5; i++) {
oper.beginWindow(wid);
oper.emitTuples();
oper.endWindow();
wid++;
}
// Only some of the 12 lines have been read at this point.
Assert.assertEquals("Partial tuples read ", 6, sink.collectedTuples.size());
Assert.assertEquals(1, initialState.getCurrentPartitions());
initialState.setPartitionCount(2);
StatsListener.Response rsp = initialState.processStats(null);
Assert.assertEquals(true, rsp.repartitionRequired);
// Create partitions of the operator.
List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
// incremental capacity controlled partitionCount property
Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = initialState.definePartitions(partitions, new PartitioningContextImpl(null, 0));
Assert.assertEquals(2, newPartitions.size());
Assert.assertEquals(1, initialState.getCurrentPartitions());
Map<Integer, Partition<AbstractFileInputOperator<String>>> m = Maps.newHashMap();
for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
m.put(m.size(), p);
}
initialState.partitioned(m);
Assert.assertEquals(2, initialState.getCurrentPartitions());
/* Collect all operators in a list */
List<AbstractFileInputOperator<String>> opers = Lists.newArrayList();
for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
LineByLineFileInputOperator oi = (LineByLineFileInputOperator)p.getPartitionedInstance();
oi.setup(testMeta.context);
oi.output.setSink(sink);
opers.add(oi);
}
sink.clear();
for (int i = 0; i < 10; i++) {
for (AbstractFileInputOperator<String> o : opers) {
o.beginWindow(wid);
o.emitTuples();
o.endWindow();
}
wid++;
}
// No record should be read.
// The two partitions together must emit exactly the remaining 12 - 6 = 6 lines,
// without re-emitting anything already consumed before the repartition.
Assert.assertEquals("Remaining tuples read ", 6, sink.collectedTuples.size());
}
/**
 * Recovery scenario: a file recorded as failed at offset 1 is retried with the
 * directory scanner disabled; the operator must resume from that offset and
 * emit the remaining 4 of 5 lines.
 */
@Test
public void testRecoveryWithFailedFile() throws Exception
{
  FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);

  // NOTE: the lines pass through a HashSet before being joined, so the file's
  // on-disk order (and the expected sublist below) follows the set's iteration
  // order for these strings — construction preserved from the original test.
  List<String> allLines = Lists.newArrayList();
  HashSet<String> lines = Sets.newHashSet();
  for (int i = 0; i < 5; i++) {
    lines.add("f0" + "l" + i);
  }
  allLines.addAll(lines);

  File testFile = new File(testMeta.dir, "file0");
  FileUtils.write(testFile, StringUtils.join(lines, '\n'));

  LineByLineFileInputOperator reader = new LineByLineFileInputOperator();
  reader.scanner = null; // no scanning: only the seeded failed file is read
  reader.failedFiles.add(new AbstractFileInputOperator.FailedFile(testFile.getAbsolutePath(), 1));

  CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
  @SuppressWarnings({"unchecked", "rawtypes"})
  CollectorTestSink<Object> collector = (CollectorTestSink)queryResults;
  reader.output.setSink(collector);
  reader.setDirectory(testMeta.dir);

  reader.setup(testMeta.context);
  reader.beginWindow(0);
  reader.emitTuples();
  reader.endWindow();
  reader.teardown();

  // One line was consumed before the recorded failure, so 4 remain.
  Assert.assertEquals("number tuples", 4, queryResults.collectedTuples.size());
  Assert.assertEquals("lines", allLines.subList(1, allLines.size()), new ArrayList<String>(queryResults.collectedTuples));
}
/**
 * Recovery scenario: a file recorded as unfinished at offset 2 must be resumed
 * from that offset, emitting the remaining 3 of 5 lines.
 */
@Test
public void testRecoveryWithUnfinishedFile() throws Exception
{
FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
List<String> allLines = Lists.newArrayList();
HashSet<String> lines = Sets.newHashSet();
for (int line = 0; line < 5; line++) {
lines.add("f0" + "l" + line);
}
allLines.addAll(lines);
File testFile = new File(testMeta.dir, "file0");
FileUtils.write(testFile, StringUtils.join(lines, '\n'));
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
// Disable scanning so only the seeded unfinished file is read.
oper.scanner = null;
oper.unfinishedFiles.add(new AbstractFileInputOperator.FailedFile(testFile.getAbsolutePath(), 2));
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
@SuppressWarnings({"unchecked", "rawtypes"})
CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
oper.output.setSink(sink);
oper.setDirectory(testMeta.dir);
oper.setup(testMeta.context);
oper.beginWindow(0);
oper.emitTuples();
oper.endWindow();
oper.teardown();
// Two lines were consumed before the interruption, so 3 remain.
Assert.assertEquals("number tuples", 3, queryResults.collectedTuples.size());
Assert.assertEquals("lines", allLines.subList(2, allLines.size()), new ArrayList<String>(queryResults.collectedTuples));
}
/**
 * Recovery scenario: a file queued in pendingFiles (never started) must be read
 * in full — all 5 lines emitted.
 */
@Test
public void testRecoveryWithPendingFile() throws Exception
{
FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
List<String> allLines = Lists.newArrayList();
HashSet<String> lines = Sets.newHashSet();
for (int line = 0; line < 5; line++) {
lines.add("f0" + "l" + line);
}
allLines.addAll(lines);
File testFile = new File(testMeta.dir, "file0");
FileUtils.write(testFile, StringUtils.join(lines, '\n'));
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
// Disable scanning so only the seeded pending file is read.
oper.scanner = null;
oper.pendingFiles.add(testFile.getAbsolutePath());
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
@SuppressWarnings({"unchecked", "rawtypes"})
CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
oper.output.setSink(sink);
oper.setDirectory(testMeta.dir);
oper.setup(testMeta.context);
oper.beginWindow(0);
oper.emitTuples();
oper.endWindow();
oper.teardown();
Assert.assertEquals("number tuples", 5, queryResults.collectedTuples.size());
Assert.assertEquals("lines", allLines, new ArrayList<String>(queryResults.collectedTuples));
}
/**
 * Recovery scenario: the operator restarts with currentFile set and offset 1;
 * it must resume the in-progress file from that offset, emitting 4 of 5 lines.
 */
@Test
public void testRecoveryWithCurrentFile() throws Exception
{
FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
List<String> allLines = Lists.newArrayList();
HashSet<String> lines = Sets.newHashSet();
for (int line = 0; line < 5; line++) {
lines.add("f0" + "l" + line);
}
allLines.addAll(lines);
File testFile = new File(testMeta.dir, "file0");
FileUtils.write(testFile, StringUtils.join(lines, '\n'));
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
// Disable scanning so only the seeded in-progress file is read.
oper.scanner = null;
oper.currentFile = testFile.getAbsolutePath();
oper.offset = 1;
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
@SuppressWarnings({"unchecked", "rawtypes"})
CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
oper.output.setSink(sink);
oper.setDirectory(testMeta.dir);
oper.setup(testMeta.context);
oper.beginWindow(0);
oper.emitTuples();
oper.endWindow();
oper.teardown();
// One line was already consumed before the restart, so 4 remain.
Assert.assertEquals("number tuples", 4, queryResults.collectedTuples.size());
Assert.assertEquals("lines", allLines.subList(1, allLines.size()), new ArrayList<String>(queryResults.collectedTuples));
}
/**
 * Verifies replay through FSWindowDataManager: after the first run reads all 4
 * lines across windows 0-2, a second setup() replays the same windows — without
 * any emitTuples() calls — and must re-emit the identical tuples in order.
 */
@Test
public void testIdempotency() throws Exception
{
FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
List<String> allLines = Lists.newArrayList();
// 2 files with 2 lines each: f0l0, f0l1, f1l0, f1l1.
for (int file = 0; file < 2; file++) {
List<String> lines = Lists.newArrayList();
for (int line = 0; line < 2; line++) {
lines.add("f" + file + "l" + line);
}
allLines.addAll(lines);
FileUtils.write(new File(testMeta.dir, "file" + file), StringUtils.join(lines, '\n'));
}
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
FSWindowDataManager manager = new FSWindowDataManager();
manager.setStatePath(testMeta.dir + "/recovery");
oper.setWindowDataManager(manager);
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
TestUtils.setSink(oper.output, queryResults);
oper.setDirectory(testMeta.dir);
oper.getScanner().setFilePatternRegexp(".*file[\\d]");
oper.setup(testMeta.context);
for (long wid = 0; wid < 3; wid++) {
oper.beginWindow(wid);
oper.emitTuples();
oper.endWindow();
}
oper.teardown();
List<String> beforeRecovery = Lists.newArrayList(queryResults.collectedTuples);
queryResults.clear();
//idempotency part
// Windows 0-2 are recorded in the data manager; beginWindow() alone must
// replay the recorded tuples (note: no emitTuples() in this loop).
oper.setup(testMeta.context);
for (long wid = 0; wid < 3; wid++) {
oper.beginWindow(wid);
oper.endWindow();
}
Assert.assertEquals("number tuples", 4, queryResults.collectedTuples.size());
Assert.assertEquals("lines", beforeRecovery, queryResults.collectedTuples);
oper.teardown();
}
/**
 * Like testIdempotency, but all tuples are produced by multiple emitTuples()
 * calls inside a single window; the replay of that one window must still
 * reproduce all 4 tuples.
 */
@Test
public void testIdempotencyWithMultipleEmitTuples() throws Exception
{
FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
List<String> allLines = Lists.newArrayList();
// 2 files with 2 lines each: f0l0, f0l1, f1l0, f1l1.
for (int file = 0; file < 2; file++) {
List<String> lines = Lists.newArrayList();
for (int line = 0; line < 2; line++) {
lines.add("f" + file + "l" + line);
}
allLines.addAll(lines);
FileUtils.write(new File(testMeta.dir, "file" + file), StringUtils.join(lines, '\n'));
}
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
FSWindowDataManager manager = new FSWindowDataManager();
manager.setStatePath(testMeta.dir + "/recovery");
oper.setWindowDataManager(manager);
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
TestUtils.setSink(oper.output, queryResults);
oper.setDirectory(testMeta.dir);
oper.getScanner().setFilePatternRegexp(".*file[\\d]");
oper.setup(testMeta.context);
// All reads happen inside window 0, across three emitTuples() invocations.
oper.beginWindow(0);
for (int i = 0; i < 3; i++) {
oper.emitTuples();
}
oper.endWindow();
oper.teardown();
List<String> beforeRecovery = Lists.newArrayList(queryResults.collectedTuples);
queryResults.clear();
//idempotency part
// Replaying window 0 (no emitTuples()) must reproduce the recorded tuples.
oper.setup(testMeta.context);
oper.beginWindow(0);
oper.endWindow();
Assert.assertEquals("number tuples", 4, queryResults.collectedTuples.size());
Assert.assertEquals("lines", beforeRecovery, queryResults.collectedTuples);
oper.teardown();
}
/**
 * Replay of a single file read across multiple windows: with emitBatchSize 5, a
 * 10-line file is emitted as two 5-line slices in windows 1 and 2 (window 0
 * emits nothing — see the wid > 0 guard); after restart the replayed windows
 * must produce the same slices at the same offsets.
 */
@Test
public void testIdempotencyWhenFileContinued() throws Exception
{
FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
List<String> lines = Lists.newArrayList();
for (int line = 0; line < 10; line++) {
lines.add("l" + line);
}
FileUtils.write(new File(testMeta.dir, "file0"), StringUtils.join(lines, '\n'));
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
FSWindowDataManager manager = new FSWindowDataManager();
manager.setStatePath(testMeta.dir + "/recovery");
oper.setEmitBatchSize(5);
oper.setWindowDataManager(manager);
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
@SuppressWarnings({"unchecked", "rawtypes"})
CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
oper.output.setSink(sink);
oper.setDirectory(testMeta.dir);
oper.getScanner().setFilePatternRegexp(".*file[\\d]");
oper.setup(testMeta.context);
int offset = 0;
for (long wid = 0; wid < 3; wid++) {
oper.beginWindow(wid);
oper.emitTuples();
oper.endWindow();
if (wid > 0) {
// Each post-scan window emits exactly one 5-line batch, in file order.
Assert.assertEquals("number tuples", 5, queryResults.collectedTuples.size());
Assert.assertEquals("lines", lines.subList(offset, offset + 5), queryResults.collectedTuples);
offset += 5;
}
sink.clear();
}
oper.teardown();
sink.clear();
//idempotency part
// Replayed windows (no emitTuples()) must yield identical 5-line slices.
offset = 0;
oper.setup(testMeta.context);
for (long wid = 0; wid < 3; wid++) {
oper.beginWindow(wid);
oper.endWindow();
if (wid > 0) {
Assert.assertEquals("number tuples", 5, queryResults.collectedTuples.size());
Assert.assertEquals("lines", lines.subList(offset, offset + 5), queryResults.collectedTuples);
offset += 5;
}
sink.clear();
}
oper.teardown();
}
/**
 * Verifies that recovery-state collections (pendingFiles, failedFiles,
 * unfinishedFiles) seeded before an idempotent replay are cleaned up after the
 * replayed windows complete.
 *
 * NOTE(review): the assertions below compare the bare names "file0"/"file1"/"file2"
 * against collections that were seeded with ABSOLUTE paths, so they can never
 * fail as written — they likely should compare absolute paths. TODO confirm
 * intent before tightening.
 */
@Test
public void testStateWithIdempotency() throws Exception
{
FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
HashSet<String> allLines = Sets.newHashSet();
// 3 files with 2 lines each.
for (int file = 0; file < 3; file++) {
HashSet<String> lines = Sets.newHashSet();
for (int line = 0; line < 2; line++) {
lines.add("f" + file + "l" + line);
}
allLines.addAll(lines);
FileUtils.write(new File(testMeta.dir, "file" + file), StringUtils.join(lines, '\n'));
}
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
FSWindowDataManager manager = new FSWindowDataManager();
manager.setStatePath(testMeta.dir + "/recovery");
oper.setWindowDataManager(manager);
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
@SuppressWarnings({"unchecked", "rawtypes"})
CollectorTestSink<Object> sink = (CollectorTestSink)queryResults;
oper.output.setSink(sink);
oper.setDirectory(testMeta.dir);
oper.getScanner().setFilePatternRegexp(".*file[\\d]");
oper.setup(testMeta.context);
for (long wid = 0; wid < 4; wid++) {
oper.beginWindow(wid);
oper.emitTuples();
oper.endWindow();
}
oper.teardown();
sink.clear();
//idempotency part
// Seed recovery state with absolute paths, then replay the recorded windows.
oper.pendingFiles.add(new File(testMeta.dir, "file0").getAbsolutePath());
oper.failedFiles.add(new AbstractFileInputOperator.FailedFile(new File(testMeta.dir, "file1").getAbsolutePath(), 0));
oper.unfinishedFiles.add(new AbstractFileInputOperator.FailedFile(new File(testMeta.dir, "file2").getAbsolutePath(), 0));
oper.setup(testMeta.context);
for (long wid = 0; wid < 4; wid++) {
oper.beginWindow(wid);
oper.endWindow();
}
Assert.assertTrue("pending state", !oper.pendingFiles.contains("file0"));
for (AbstractFileInputOperator.FailedFile failedFile : oper.failedFiles) {
Assert.assertTrue("failed state", !failedFile.path.equals("file1"));
}
for (AbstractFileInputOperator.FailedFile unfinishedFile : oper.unfinishedFiles) {
Assert.assertTrue("unfinished state", !unfinishedFile.path.equals("file2"));
}
oper.teardown();
}
/**
 * Checkpoints the operator mid-file (after window 1), restores it, and replays
 * windows 2..6 through the WindowDataManager, verifying each replayed window
 * emits exactly the tuples recorded before the "failure": file closes, file
 * transitions, and the empty-file case must all replay at the same window
 * boundaries.
 */
@Test
public void testIdempotencyWithCheckPoint() throws Exception
{
FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
// file0: 5 lines, file1: 6 lines, file2: empty.
List<String> lines = Lists.newArrayList();
int file = 0;
for (int line = 0; line < 5; line++) {
lines.add("f" + file + "l" + line);
}
FileUtils.write(new File(testMeta.dir, "file" + file), StringUtils.join(lines, '\n'));
file = 1;
lines = Lists.newArrayList();
for (int line = 0; line < 6; line++) {
lines.add("f" + file + "l" + line);
}
FileUtils.write(new File(testMeta.dir, "file" + file), StringUtils.join(lines, '\n'));
// empty file
file = 2;
lines = Lists.newArrayList();
FileUtils.write(new File(testMeta.dir, "file" + file), StringUtils.join(lines, '\n'));
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
FSWindowDataManager manager = new FSWindowDataManager();
manager.setStatePath(testMeta.dir + "/recovery");
oper.setWindowDataManager(manager);
oper.setDirectory(testMeta.dir);
oper.getScanner().setFilePatternRegexp(".*file[\\d]");
oper.setup(testMeta.context);
oper.setEmitBatchSize(3);
// sort the pendingFiles and ensure the ordering of the files scanned
DirectoryScannerNew newScanner = new DirectoryScannerNew();
oper.setScanner(newScanner);
// scan directory
oper.beginWindow(0);
oper.emitTuples();
oper.endWindow();
// emit f0l0, f0l1, f0l2
oper.beginWindow(1);
oper.emitTuples();
oper.endWindow();
//checkpoint the operator
ByteArrayOutputStream bos = new ByteArrayOutputStream();
LineByLineFileInputOperator checkPointOper = checkpoint(oper, bos);
// start saving output
CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
TestUtils.setSink(oper.output, queryResults);
// emit f0l3, f0l4, and closeFile(f0) in the same window
oper.beginWindow(2);
oper.emitTuples();
oper.endWindow();
List<String> beforeRecovery2 = Lists.newArrayList(queryResults.collectedTuples);
// emit f1l0, f1l1, f1l2
oper.beginWindow(3);
oper.emitTuples();
oper.endWindow();
List<String> beforeRecovery3 = Lists.newArrayList(queryResults.collectedTuples);
// emit f1l3, f1l4, f1l5
oper.beginWindow(4);
oper.emitTuples();
oper.endWindow();
List<String> beforeRecovery4 = Lists.newArrayList(queryResults.collectedTuples);
// closeFile(f1) in a new window
oper.beginWindow(5);
oper.emitTuples();
oper.endWindow();
List<String> beforeRecovery5 = Lists.newArrayList(queryResults.collectedTuples);
// empty file ops, closeFile(f2) in emitTuples() only
oper.beginWindow(6);
oper.emitTuples();
oper.endWindow();
List<String> beforeRecovery6 = Lists.newArrayList(queryResults.collectedTuples);
oper.teardown();
queryResults.clear();
//idempotency part
// Restore from the checkpoint taken after window 1 and replay from window 2.
oper = restoreCheckPoint(checkPointOper, bos);
testMeta.context.getAttributes().put(Context.OperatorContext.ACTIVATION_WINDOW_ID, 1L);
oper.setup(testMeta.context);
TestUtils.setSink(oper.output, queryResults);
long startwid = testMeta.context.getAttributes().get(Context.OperatorContext.ACTIVATION_WINDOW_ID) + 1;
oper.beginWindow(startwid);
// After restore no file may be open before the replayed window runs.
Assert.assertTrue(oper.currentFile == null);
oper.emitTuples();
oper.endWindow();
Assert.assertEquals("lines", beforeRecovery2, queryResults.collectedTuples);
oper.beginWindow(++startwid);
oper.emitTuples();
oper.endWindow();
Assert.assertEquals("lines", beforeRecovery3, queryResults.collectedTuples);
oper.beginWindow(++startwid);
oper.emitTuples();
oper.endWindow();
Assert.assertEquals("lines", beforeRecovery4, queryResults.collectedTuples);
oper.beginWindow(++startwid);
Assert.assertTrue(oper.currentFile == null);
oper.emitTuples();
oper.endWindow();
Assert.assertEquals("lines", beforeRecovery5, queryResults.collectedTuples);
oper.beginWindow(++startwid);
Assert.assertTrue(oper.currentFile == null);
oper.emitTuples();
oper.endWindow();
Assert.assertEquals("lines", beforeRecovery6, queryResults.collectedTuples);
// f0l3..f0l4 plus f1l0..f1l5 = 8 tuples after the checkpoint.
Assert.assertEquals("number tuples", 8, queryResults.collectedTuples.size());
oper.teardown();
}
/**
 * This method checkpoints the given operator.
 * @param oper The operator to checkpoint.
 * @param bos The ByteArrayOutputStream which saves the checkpoint data temporarily.
 * @return a new operator instance deserialized from the checkpoint bytes.
 * @throws Exception if Kryo serialization or deserialization fails.
 */
public static LineByLineFileInputOperator checkpoint(LineByLineFileInputOperator oper, ByteArrayOutputStream bos) throws Exception
{
  Kryo kryo = new Kryo();
  // try-with-resources guarantees the Kryo streams are closed (and the buffer
  // flushed to bos) even if serialization throws.
  try (Output loutput = new Output(bos)) {
    kryo.writeObject(loutput, oper);
  }
  try (Input linput = new Input(bos.toByteArray())) {
    // oper.getClass() is Class<? extends LineByLineFileInputOperator>, so the
    // result is assignable without an unchecked cast or suppression.
    return kryo.readObject(linput, oper.getClass());
  }
}
/**
 * Restores the checkpointed operator.
 * @param checkPointOper The checkpointed operator.
 * @param bos The ByteArrayOutputStream which saves the checkpoint data temporarily.
 * @return a new operator instance deserialized from the checkpoint bytes.
 * @throws Exception if Kryo deserialization fails.
 */
public static LineByLineFileInputOperator restoreCheckPoint(LineByLineFileInputOperator checkPointOper, ByteArrayOutputStream bos) throws Exception
{
  Kryo kryo = new Kryo();
  // No unchecked/rawtypes suppression needed: getClass() is typed as
  // Class<? extends LineByLineFileInputOperator>. try-with-resources closes
  // the input even if readObject throws.
  try (Input linput = new Input(bos.toByteArray())) {
    return kryo.readObject(linput, checkPointOper.getClass());
  }
}
/**
 * Verifies that repartitioning distributes WindowDataManagers: each of the 2 new
 * partitions gets its own FSWindowDataManager, and exactly one of them records
 * the old operator id (7) as deleted so its saved state can be cleaned up.
 */
@Test
public void testWindowDataManagerPartitioning() throws Exception
{
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
oper.getScanner().setFilePatternRegexp(".*partition([\\d]*)");
oper.setDirectory(new File(testMeta.dir).getAbsolutePath());
oper.setWindowDataManager(new FSWindowDataManager());
// The id of the pre-partition operator; expected to show up as "deleted" below.
oper.operatorId = 7;
Path path = new Path(new File(testMeta.dir).getAbsolutePath());
FileContext.getLocalFSFileContext().delete(path, true);
for (int file = 0; file < 4; file++) {
FileUtils.write(new File(testMeta.dir, "partition00" + file), "");
}
List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = oper.definePartitions(partitions, new PartitioningContextImpl(null, 2));
Assert.assertEquals(2, newPartitions.size());
Assert.assertEquals(1, oper.getCurrentPartitions());
List<FSWindowDataManager> storageManagers = Lists.newLinkedList();
for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
storageManagers.add((FSWindowDataManager)p.getPartitionedInstance().getWindowDataManager());
}
Assert.assertEquals("count of storage managers", 2, storageManagers.size());
// Exactly one of the new managers must carry the deleted-operator bookkeeping.
int countOfDeleteManagers = 0;
FSWindowDataManager deleteManager = null;
for (FSWindowDataManager storageManager : storageManagers) {
if (storageManager.getDeletedOperators() != null) {
countOfDeleteManagers++;
deleteManager = storageManager;
}
}
Assert.assertEquals("count of delete managers", 1, countOfDeleteManagers);
Assert.assertNotNull("deleted operators manager", deleteManager);
Assert.assertEquals("deleted operators", Sets.newHashSet(7), deleteManager.getDeletedOperators());
}
/** scanner to extract partition id from start of the filename */
static class MyScanner extends AbstractFileInputOperator.DirectoryScanner
{
  /**
   * Reads the partition id from the numeric prefix of the filename
   * (e.g. "3_partition_007" -> 3). Falls back to the superclass's default
   * partition assignment when the prefix is not a parseable integer.
   */
  @Override
  protected int getPartition(String filePathStr)
  {
    // Isolate the filename, then the token before the first underscore.
    String fileName = filePathStr.substring(filePathStr.lastIndexOf('/') + 1);
    int sep = fileName.indexOf('_');
    String prefix = (sep < 0) ? fileName : fileName.substring(0, sep);
    try {
      return Integer.parseInt(prefix);
    } catch (NumberFormatException ex) {
      return super.getPartition(filePathStr);
    }
  }
}
/**
 * Partition the operator in 2
 * create ten files with index of the file at the start, i.e 1_file, 2_file .. etc.
 * The scanner returns this index from getPartition method.
 * each partition should read 5 files as file index are from 0 to 9 (including 0 and 9).
 * @throws Exception
 */
@Test
public void testWithCustomScanner() throws Exception
{
LineByLineFileInputOperator oper = new LineByLineFileInputOperator();
oper.setScanner(new MyScanner());
oper.getScanner().setFilePatternRegexp(".*partition_([\\d]*)");
oper.setDirectory(new File(testMeta.dir).getAbsolutePath());
Random rand = new Random();
Path path = new Path(new File(testMeta.dir).getAbsolutePath());
FileContext.getLocalFSFileContext().delete(path, true);
// Filename prefix (0..9) carries the partition id consumed by MyScanner;
// the random suffix only makes the names distinct.
for (int file = 0; file < 10; file++) {
FileUtils.write(new File(testMeta.dir, file + "_partition_00" + rand.nextInt(100)), "");
}
List<Partition<AbstractFileInputOperator<String>>> partitions = Lists.newArrayList();
partitions.add(new DefaultPartition<AbstractFileInputOperator<String>>(oper));
Collection<Partition<AbstractFileInputOperator<String>>> newPartitions = oper.definePartitions(partitions,
new PartitioningContextImpl(null, 2));
Assert.assertEquals(2, newPartitions.size());
Assert.assertEquals(1, oper.getCurrentPartitions()); // partitioned() wasn't called
for (Partition<AbstractFileInputOperator<String>> p : newPartitions) {
Assert.assertNotSame(oper, p.getPartitionedInstance());
Assert.assertNotSame(oper.getScanner(), p.getPartitionedInstance().getScanner());
Set<String> consumed = Sets.newHashSet();
LinkedHashSet<Path> files = p.getPartitionedInstance().getScanner().scan(FileSystem.getLocal(new Configuration(false)), path, consumed);
// Prefixes 0..9 modulo 2 partitions -> 5 files each.
Assert.assertEquals("partition " + files, 5, files.size());
}
}
/**
 * Verifies partition-aware file acceptance: with 2 partitions, file "1_file"
 * (partition id 1, taken from the filename prefix) is accepted only by the
 * scanner configured with partition index 1.
 */
@Test
public void testCustomScanner()
{
  MyScanner partitionAwareScanner = new MyScanner();
  partitionAwareScanner.setPartitionCount(2);

  partitionAwareScanner.setPartitionIndex(1);
  Assert.assertTrue("File should be accepted by this partition ", partitionAwareScanner.acceptFile("1_file"));

  partitionAwareScanner.setPartitionIndex(0);
  Assert.assertFalse("File should not be accepted by this partition ", partitionAwareScanner.acceptFile("1_file"));
}
private static class DirectoryScannerNew extends DirectoryScanner
{
public LinkedHashSet<Path> scan(FileSystem fs, Path filePath, Set<String> consumedFiles)
{
LinkedHashSet<Path> pathSet;
pathSet = super.scan(fs, filePath, consumedFiles);
TreeSet<Path> orderFiles = new TreeSet<>();
orderFiles.addAll(pathSet);
pathSet.clear();
Iterator<Path> fileIterator = orderFiles.iterator();
while (fileIterator.hasNext()) {
pathSet.add(fileIterator.next());
}
return pathSet;
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import com.google.common.collect.Lists;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.queries.TermsFilter;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.search.AndFilter;
import org.elasticsearch.common.lucene.search.OrFilter;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.XBooleanFilter;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.cache.filter.terms.IndicesTermsFilterCache;
import org.elasticsearch.indices.cache.filter.terms.TermsLookup;
import java.io.IOException;
import java.util.List;
import static org.elasticsearch.index.query.support.QueryParsers.wrapSmartNameFilter;
/**
*
*/
/**
 * Parser for the {@code terms} filter (also registered under the alias {@code in}).
 *
 * <p>Two forms are supported:
 * <ul>
 *   <li>inline: {@code {"terms": {"field": [v1, v2, ...]}}}</li>
 *   <li>lookup: {@code {"terms": {"field": {"index": ..., "type": ..., "id": ..., "path": ...}}}}
 *       where the terms are fetched from another document through the terms filter cache.</li>
 * </ul>
 *
 * <p>The {@code execution} option ({@code plain}, {@code fielddata}, {@code bool},
 * {@code and}, {@code or}, plus {@code *_nocache} variants) selects how the resulting
 * Lucene filter is built and which level (whole filter vs. per-term filters) is cached.
 */
public class TermsFilterParser implements FilterParser {

    public static final String NAME = "terms";

    // Optionally injected (see setIndicesTermsFilterCache); only required for the
    // lookup form of the filter.
    private IndicesTermsFilterCache termsFilterCache;

    @Inject
    public TermsFilterParser() {
    }

    @Override
    public String[] names() {
        // "in" is an alias for "terms".
        return new String[]{NAME, "in"};
    }

    @Inject(optional = true)
    public void setIndicesTermsFilterCache(IndicesTermsFilterCache termsFilterCache) {
        this.termsFilterCache = termsFilterCache;
    }

    /**
     * Parses a terms filter from the current parse context.
     *
     * @return the built filter, or {@code null} if the terms lookup yields no filter
     * @throws QueryParsingException on malformed input (missing field name, unknown
     *         options, or incomplete lookup specification)
     */
    @Override
    public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        MapperService.SmartNameFieldMappers smartNameFieldMappers;
        Boolean cache = null; // null = "_cache" not specified; default depends on execution mode
        String filterName = null;
        String currentFieldName = null;

        // Lookup parameters; the index defaults to the current one.
        String lookupIndex = parseContext.index().name();
        String lookupType = null;
        String lookupId = null;
        String lookupPath = null;
        String lookupRouting = null;
        boolean lookupCache = true;

        CacheKeyFilter.Key cacheKey = null;
        XContentParser.Token token;
        String execution = "plain";
        List<Object> terms = Lists.newArrayList();
        String fieldName = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_ARRAY) {
                // Inline form: "field": [v1, v2, ...]
                fieldName = currentFieldName;
                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                    Object value = parser.objectBytes();
                    if (value == null) {
                        throw new QueryParsingException(parseContext.index(), "No value specified for terms filter");
                    }
                    terms.add(value);
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                // Lookup form: "field": {"index": ..., "type": ..., "id": ..., "path": ...}
                fieldName = currentFieldName;
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else if (token.isValue()) {
                        if ("index".equals(currentFieldName)) {
                            lookupIndex = parser.text();
                        } else if ("type".equals(currentFieldName)) {
                            lookupType = parser.text();
                        } else if ("id".equals(currentFieldName)) {
                            lookupId = parser.text();
                        } else if ("path".equals(currentFieldName)) {
                            lookupPath = parser.text();
                        } else if ("routing".equals(currentFieldName)) {
                            lookupRouting = parser.textOrNull();
                        } else if ("cache".equals(currentFieldName)) {
                            lookupCache = parser.booleanValue();
                        } else {
                            throw new QueryParsingException(parseContext.index(), "[terms] filter does not support [" + currentFieldName + "] within lookup element");
                        }
                    }
                }
                // type, id and path are all mandatory for the lookup form.
                if (lookupType == null) {
                    throw new QueryParsingException(parseContext.index(), "[terms] filter lookup element requires specifying the type");
                }
                if (lookupId == null) {
                    throw new QueryParsingException(parseContext.index(), "[terms] filter lookup element requires specifying the id");
                }
                if (lookupPath == null) {
                    throw new QueryParsingException(parseContext.index(), "[terms] filter lookup element requires specifying the path");
                }
            } else if (token.isValue()) {
                if ("execution".equals(currentFieldName)) {
                    execution = parser.text();
                } else if ("_name".equals(currentFieldName)) {
                    filterName = parser.text();
                } else if ("_cache".equals(currentFieldName)) {
                    cache = parser.booleanValue();
                } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
                    cacheKey = new CacheKeyFilter.Key(parser.text());
                } else {
                    throw new QueryParsingException(parseContext.index(), "[terms] filter does not support [" + currentFieldName + "]");
                }
            }
        }
        if (fieldName == null) {
            throw new QueryParsingException(parseContext.index(), "terms filter requires a field name, followed by array of terms");
        }

        // Resolve the field against the mappings. If the field name carries an explicit
        // type, the parse context's types are narrowed for the duration of this method.
        FieldMapper fieldMapper = null;
        smartNameFieldMappers = parseContext.smartFieldMappers(fieldName);
        String[] previousTypes = null;
        if (smartNameFieldMappers != null) {
            if (smartNameFieldMappers.hasMapper()) {
                fieldMapper = smartNameFieldMappers.mapper();
                fieldName = fieldMapper.names().indexName();
            }
            // if we have a doc mapper, its explicit type, mark it
            if (smartNameFieldMappers.explicitTypeInNameWithDocMapper()) {
                previousTypes = QueryParseContext.setTypesWithPrevious(new String[]{smartNameFieldMappers.docMapper().type()});
            }
        }

        // BUGFIX: the try block now also covers the lookup and empty-terms early
        // returns, so the thread-local types set above are always restored. The
        // original only wrapped the inline-terms path, leaking the narrowed types
        // when a lookup filter (or an empty terms list) returned early.
        try {
            if (lookupId != null) {
                // if there are no mappings, then nothing has been indexing yet against this shard, so we can return
                // no match (but not cached!), since the Terms Lookup relies on the fact that there are mappings...
                if (fieldMapper == null) {
                    return Queries.MATCH_NO_FILTER;
                }

                // external lookup, use it
                TermsLookup termsLookup = new TermsLookup(fieldMapper, lookupIndex, lookupType, lookupId, lookupRouting, lookupPath, parseContext);
                Filter filter = termsFilterCache.termsFilter(termsLookup, lookupCache, cacheKey);
                if (filter == null) {
                    return null;
                }

                // cache the whole filter by default, or if explicitly told to
                if (cache == null || cache) {
                    filter = parseContext.cacheFilter(filter, cacheKey);
                }
                return filter;
            }

            if (terms.isEmpty()) {
                return Queries.MATCH_NO_FILTER;
            }

            Filter filter;
            if ("plain".equals(execution)) {
                if (fieldMapper != null) {
                    filter = fieldMapper.termsFilter(terms, parseContext);
                } else {
                    BytesRef[] filterValues = new BytesRef[terms.size()];
                    for (int i = 0; i < filterValues.length; i++) {
                        filterValues[i] = BytesRefs.toBytesRef(terms.get(i));
                    }
                    filter = new TermsFilter(fieldName, filterValues);
                }
                // cache the whole filter by default, or if explicitly told to
                if (cache == null || cache) {
                    filter = parseContext.cacheFilter(filter, cacheKey);
                }
            } else if ("fielddata".equals(execution)) {
                // if there are no mappings, then nothing has been indexing yet against this shard, so we can return
                // no match (but not cached!), since the FieldDataTermsFilter relies on a mapping...
                if (fieldMapper == null) {
                    return Queries.MATCH_NO_FILTER;
                }
                // NOTE(review): parseContext is passed as both the first and last argument;
                // confirm this matches the intended FieldMapper#termsFilter overload.
                filter = fieldMapper.termsFilter(parseContext, terms, parseContext);
                // fielddata filters are not cached by default (only on explicit request)
                if (cache != null && cache) {
                    filter = parseContext.cacheFilter(filter, cacheKey);
                }
            } else if ("bool".equals(execution)) {
                XBooleanFilter boolFilter = new XBooleanFilter();
                if (fieldMapper != null) {
                    for (Object term : terms) {
                        boolFilter.add(parseContext.cacheFilter(fieldMapper.termFilter(term, parseContext), null), BooleanClause.Occur.SHOULD);
                    }
                } else {
                    for (Object term : terms) {
                        boolFilter.add(parseContext.cacheFilter(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))), null), BooleanClause.Occur.SHOULD);
                    }
                }
                filter = boolFilter;
                // only cache if explicitly told to, since we cache inner filters
                if (cache != null && cache) {
                    filter = parseContext.cacheFilter(filter, cacheKey);
                }
            } else if ("bool_nocache".equals(execution)) {
                XBooleanFilter boolFilter = new XBooleanFilter();
                if (fieldMapper != null) {
                    for (Object term : terms) {
                        boolFilter.add(fieldMapper.termFilter(term, parseContext), BooleanClause.Occur.SHOULD);
                    }
                } else {
                    for (Object term : terms) {
                        boolFilter.add(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))), BooleanClause.Occur.SHOULD);
                    }
                }
                filter = boolFilter;
                // cache the whole filter by default, or if explicitly told to
                if (cache == null || cache) {
                    filter = parseContext.cacheFilter(filter, cacheKey);
                }
            } else if ("and".equals(execution)) {
                List<Filter> filters = Lists.newArrayList();
                if (fieldMapper != null) {
                    for (Object term : terms) {
                        filters.add(parseContext.cacheFilter(fieldMapper.termFilter(term, parseContext), null));
                    }
                } else {
                    for (Object term : terms) {
                        filters.add(parseContext.cacheFilter(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))), null));
                    }
                }
                filter = new AndFilter(filters);
                // only cache if explicitly told to, since we cache inner filters
                if (cache != null && cache) {
                    filter = parseContext.cacheFilter(filter, cacheKey);
                }
            } else if ("and_nocache".equals(execution)) {
                List<Filter> filters = Lists.newArrayList();
                if (fieldMapper != null) {
                    for (Object term : terms) {
                        filters.add(fieldMapper.termFilter(term, parseContext));
                    }
                } else {
                    for (Object term : terms) {
                        filters.add(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))));
                    }
                }
                filter = new AndFilter(filters);
                // cache the whole filter by default, or if explicitly told to
                if (cache == null || cache) {
                    filter = parseContext.cacheFilter(filter, cacheKey);
                }
            } else if ("or".equals(execution)) {
                List<Filter> filters = Lists.newArrayList();
                if (fieldMapper != null) {
                    for (Object term : terms) {
                        filters.add(parseContext.cacheFilter(fieldMapper.termFilter(term, parseContext), null));
                    }
                } else {
                    for (Object term : terms) {
                        filters.add(parseContext.cacheFilter(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))), null));
                    }
                }
                filter = new OrFilter(filters);
                // only cache if explicitly told to, since we cache inner filters
                if (cache != null && cache) {
                    filter = parseContext.cacheFilter(filter, cacheKey);
                }
            } else if ("or_nocache".equals(execution)) {
                List<Filter> filters = Lists.newArrayList();
                if (fieldMapper != null) {
                    for (Object term : terms) {
                        filters.add(fieldMapper.termFilter(term, parseContext));
                    }
                } else {
                    for (Object term : terms) {
                        filters.add(new TermFilter(new Term(fieldName, BytesRefs.toBytesRef(term))));
                    }
                }
                filter = new OrFilter(filters);
                // cache the whole filter by default, or if explicitly told to
                if (cache == null || cache) {
                    filter = parseContext.cacheFilter(filter, cacheKey);
                }
            } else {
                throw new QueryParsingException(parseContext.index(), "terms filter execution value [" + execution + "] not supported");
            }

            filter = wrapSmartNameFilter(filter, smartNameFieldMappers, parseContext);
            if (filterName != null) {
                parseContext.addNamedFilter(filterName, filter);
            }
            return filter;
        } finally {
            // Restore the thread-local types narrowed above, on every exit path.
            if (smartNameFieldMappers != null && smartNameFieldMappers.explicitTypeInNameWithDocMapper()) {
                QueryParseContext.setTypes(previousTypes);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.schema;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.Map.Entry;
import javax.annotation.Nullable;
import com.google.common.base.MoreObjects;
import com.google.common.collect.*;
import org.apache.cassandra.auth.DataResource;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.ColumnIdentifier;
import org.apache.cassandra.cql3.CqlBuilder;
import org.apache.cassandra.cql3.SchemaElement;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.service.reads.SpeculativeRetryPolicy;
import org.apache.cassandra.utils.AbstractIterator;
import org.github.jamm.Unmetered;
import static com.google.common.collect.Iterables.any;
import static com.google.common.collect.Iterables.transform;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static org.apache.cassandra.schema.IndexMetadata.isNameValid;
@Unmetered
public final class TableMetadata implements SchemaElement
{
/**
 * Message used to halt startup when an un-migrated pre-4.0 COMPACT STORAGE table is
 * detected; formatted with the keyspace, table name and the offending flags.
 */
public static final String COMPACT_STORAGE_HALT_MESSAGE =
"Detected table %s.%s with COMPACT STORAGE flags (%s). " +
"Compact Tables are not supported in Cassandra starting with version 4.0. " +
"Use the `ALTER ... DROP COMPACT STORAGE` command supplied in 3.x/3.11 Cassandra " +
"in order to migrate off Compact Storage before upgrading.";
// Please note that currently the only one truly useful flag is COUNTER, as the rest of the flags were about
// differencing between CQL tables and the various types of COMPACT STORAGE tables (pre-4.0). As those "compact"
// tables are not supported anymore, no tables should be either SUPER or DENSE, and they should all be COMPOUND.
/**
 * Table-level flags persisted with the schema.
 *
 * COMPOUND is written for every 4.0+ table: its absence identifies a pre-4.0 compact
 * table on which DROP COMPACT STORAGE was never run, which must fail startup. Once
 * upgrades from pre-4.0 are no longer supported the flag can be deprecated and dropped.
 * COUNTER marks counter tables and is the only flag still carrying real information.
 * SUPER and DENSE are kept only so legacy compact tables can be recognized (and
 * rejected) when deserializing schema from a pre-4.0 cluster.
 */
public enum Flag
{
    COMPOUND,
    COUNTER,
    @Deprecated SUPER,
    @Deprecated DENSE;

    /** True if the flags describe a pre-4.0 compact table: DENSE, SUPER, or no COMPOUND. */
    static boolean isLegacyCompactTable(Set<Flag> flags)
    {
        return !flags.contains(Flag.COMPOUND) || flags.contains(Flag.SUPER) || flags.contains(Flag.DENSE);
    }

    /** Parses case-insensitive flag names (as stored in schema tables) into flags. */
    public static Set<Flag> fromStringSet(Set<String> strings)
    {
        return strings.stream()
                      .map(name -> Flag.valueOf(name.toUpperCase()))
                      .collect(toSet());
    }

    /** Serializes flags to their lower-case string names for storage. */
    public static Set<String> toStringSet(Set<Flag> flags)
    {
        return flags.stream()
                    .map(flag -> flag.toString().toLowerCase())
                    .collect(toSet());
    }
}
/** Broad category of the table: base table, secondary index, materialized view, or virtual table. */
public enum Kind
{
REGULAR, INDEX, VIEW, VIRTUAL
}
// Identity of the table.
public final String keyspace;
public final String name;
// Stable unique id (survives renames).
public final TableId id;
public final IPartitioner partitioner;
public final Kind kind;
public final TableParams params;
public final ImmutableSet<Flag> flags;
// For Kind.INDEX tables only: the index part of the "<base>.<index>" table name; null otherwise.
@Nullable
private final String indexName; // derived from table name
/*
* All CQL3 columns definition are stored in the columns map.
* On top of that, we keep separated collection of each kind of definition, to
* 1) allow easy access to each kind and
* 2) for the partition key and clustering key ones, those list are ordered by the "component index" of the elements.
*/
public final ImmutableMap<ByteBuffer, DroppedColumn> droppedColumns;
final ImmutableMap<ByteBuffer, ColumnMetadata> columns;
private final ImmutableList<ColumnMetadata> partitionKeyColumns;
private final ImmutableList<ColumnMetadata> clusteringColumns;
private final RegularAndStaticColumns regularAndStaticColumns;
public final Indexes indexes;
public final Triggers triggers;
// derived automatically from flags and columns
public final AbstractType<?> partitionKeyType;
public final ClusteringComparator comparator;
// performance hacks; TODO see if all are really necessary
public final DataResource resource;
// Built only through Builder.build(). Copies everything from the builder into
// immutable collections and derives the cached fields (indexName, partitionKeyType,
// comparator, resource).
private TableMetadata(Builder builder)
{
// Hard-fail on un-migrated pre-4.0 compact tables (see COMPACT_STORAGE_HALT_MESSAGE).
if (Flag.isLegacyCompactTable(builder.flags))
throw new IllegalStateException(format(COMPACT_STORAGE_HALT_MESSAGE,
builder.keyspace,
builder.name,
builder.flags));
flags = Sets.immutableEnumSet(builder.flags);
keyspace = builder.keyspace;
name = builder.name;
id = builder.id;
partitioner = builder.partitioner;
kind = builder.kind;
params = builder.params.build();
// Secondary index tables are named "<base>.<index>"; extract the index part.
indexName = kind == Kind.INDEX ? name.substring(name.indexOf('.') + 1) : null;
droppedColumns = ImmutableMap.copyOf(builder.droppedColumns);
// Key columns are kept sorted by their position (component index) within the key.
Collections.sort(builder.partitionKeyColumns);
partitionKeyColumns = ImmutableList.copyOf(builder.partitionKeyColumns);
Collections.sort(builder.clusteringColumns);
clusteringColumns = ImmutableList.copyOf(builder.clusteringColumns);
regularAndStaticColumns = RegularAndStaticColumns.builder().addAll(builder.regularAndStaticColumns).build();
columns = ImmutableMap.copyOf(builder.columns);
indexes = builder.indexes;
triggers = builder.triggers;
// Single-column partition keys use the column type directly; composite keys wrap
// the component types in a CompositeType.
partitionKeyType = partitionKeyColumns.size() == 1
? partitionKeyColumns.get(0).type
: CompositeType.getInstance(transform(partitionKeyColumns, t -> t.type));
comparator = new ClusteringComparator(transform(clusteringColumns, c -> c.type));
resource = DataResource.table(keyspace, name);
}
/** Creates a builder for a new table; the table id is generated at build time if not set. */
public static Builder builder(String keyspace, String table)
{
return new Builder(keyspace, table);
}
/** Creates a builder for a table with a known id. */
public static Builder builder(String keyspace, String table, TableId id)
{
return new Builder(keyspace, table, id);
}
/** Returns a builder pre-populated with this table's metadata, for producing a modified copy. */
public Builder unbuild()
{
return builder(keyspace, name, id)
.partitioner(partitioner)
.kind(kind)
.params(params)
.flags(flags)
.addColumns(columns())
.droppedColumns(droppedColumns)
.indexes(indexes)
.triggers(triggers);
}
/** Whether this is the backing table of a secondary index. */
public boolean isIndex()
{
return kind == Kind.INDEX;
}
/** Returns a copy of this metadata with the given params substituted. */
public TableMetadata withSwapped(TableParams params)
{
return unbuild().params(params).build();
}
/** Returns a copy of this metadata with the given triggers substituted. */
public TableMetadata withSwapped(Triggers triggers)
{
return unbuild().triggers(triggers).build();
}
/** Returns a copy of this metadata with the given indexes substituted. */
public TableMetadata withSwapped(Indexes indexes)
{
return unbuild().indexes(indexes).build();
}
/** Whether this is a materialized view. */
public boolean isView()
{
return kind == Kind.VIEW;
}
/** Whether this is a virtual (system-generated, non-persisted) table. */
public boolean isVirtual()
{
return kind == Kind.VIRTUAL;
}
/** The index name for Kind.INDEX tables, empty otherwise. */
public Optional<String> indexName()
{
return Optional.ofNullable(indexName);
}
/** Whether this is a counter table. */
public boolean isCounter()
{
return flags.contains(Flag.COUNTER);
}
/** All column definitions, in the iteration order of the underlying columns map. */
public ImmutableCollection<ColumnMetadata> columns()
{
return columns.values();
}
/** Partition key columns followed by clustering columns. */
public Iterable<ColumnMetadata> primaryKeyColumns()
{
return Iterables.concat(partitionKeyColumns, clusteringColumns);
}
/** Partition key columns, ordered by component index. */
public ImmutableList<ColumnMetadata> partitionKeyColumns()
{
return partitionKeyColumns;
}
/** Clustering columns, ordered by component index. */
public ImmutableList<ColumnMetadata> clusteringColumns()
{
return clusteringColumns;
}
/** All regular and static columns. */
public RegularAndStaticColumns regularAndStaticColumns()
{
return regularAndStaticColumns;
}
/** Regular (non-key, non-static) columns only. */
public Columns regularColumns()
{
return regularAndStaticColumns.regulars;
}
/** Static columns only. */
public Columns staticColumns()
{
return regularAndStaticColumns.statics;
}
/*
* An iterator over all column definitions but that respect the order of a SELECT *.
*/
public Iterator<ColumnMetadata> allColumnsInSelectOrder()
{
Iterator<ColumnMetadata> partitionKeyIter = partitionKeyColumns.iterator();
Iterator<ColumnMetadata> clusteringIter = clusteringColumns.iterator();
// selectOrderIterator() orders the remaining columns as SELECT * would list them.
Iterator<ColumnMetadata> otherColumns = regularAndStaticColumns.selectOrderIterator();
return columnsIterator(partitionKeyIter, clusteringIter, otherColumns);
}
/**
* Returns an iterator over all column definitions that respect the order of the CREATE statement.
*/
public Iterator<ColumnMetadata> allColumnsInCreateOrder()
{
Iterator<ColumnMetadata> partitionKeyIter = partitionKeyColumns.iterator();
Iterator<ColumnMetadata> clusteringIter = clusteringColumns.iterator();
Iterator<ColumnMetadata> otherColumns = regularAndStaticColumns.iterator();
return columnsIterator(partitionKeyIter, clusteringIter, otherColumns);
}
// Chains the three iterators: partition key columns first, then clustering
// columns, then the remaining (regular/static) columns.
private static Iterator<ColumnMetadata> columnsIterator(Iterator<ColumnMetadata> partitionKeys,
Iterator<ColumnMetadata> clusteringColumns,
Iterator<ColumnMetadata> otherColumns)
{
return new AbstractIterator<ColumnMetadata>()
{
protected ColumnMetadata computeNext()
{
if (partitionKeys.hasNext())
return partitionKeys.next();
if (clusteringColumns.hasNext())
return clusteringColumns.next();
return otherColumns.hasNext() ? otherColumns.next() : endOfData();
}
};
}
/**
* Returns the ColumnMetadata for {@code name}, or {@code null} if no such column exists.
*/
public ColumnMetadata getColumn(ColumnIdentifier name)
{
return columns.get(name.bytes);
}
/**
* Returns the column of the provided name if it exists, but throws a user-visible exception if that column doesn't
* exist.
*
* <p>This method is for finding columns from a name provided by the user, and as such it does _not_ return hidden
* columns (throwing that the column is unknown instead).
*
* @param name the name of an existing non-hidden column of this table.
* @return the column metadata corresponding to {@code name}.
*
* @throws InvalidRequestException if there is no non-hidden column named {@code name} in this table.
*/
public ColumnMetadata getExistingColumn(ColumnIdentifier name)
{
ColumnMetadata def = getColumn(name);
if (def == null)
throw new InvalidRequestException(format("Undefined column name %s in table %s", name.toCQLString(), this));
return def;
}
/*
* In general it is preferable to work with ColumnIdentifier to make it
* clear that we are talking about a CQL column, not a cell name, but there
* is a few cases where all we have is a ByteBuffer (when dealing with IndexExpression
* for instance) so...
*/
public ColumnMetadata getColumn(ByteBuffer name)
{
return columns.get(name);
}
/** Returns the definition of the dropped column {@code name}, or {@code null} if it was never dropped. */
public ColumnMetadata getDroppedColumn(ByteBuffer name)
{
DroppedColumn dropped = droppedColumns.get(name);
return dropped == null ? null : dropped.column;
}
/**
* Returns a "fake" ColumnMetadata corresponding to the dropped column {@code name}
* or {@code null} if there is no such dropped column.
*
* @param name - the column name
* @param isStatic - whether the column was a static column, if known
*/
public ColumnMetadata getDroppedColumn(ByteBuffer name, boolean isStatic)
{
DroppedColumn dropped = droppedColumns.get(name);
if (dropped == null)
return null;
// The recorded drop may not know the column was static; synthesize a static definition if told so.
if (isStatic && !dropped.column.isStatic())
return ColumnMetadata.staticColumn(this, name, dropped.column.type);
return dropped.column;
}
/** Whether the table defines at least one static column. */
public boolean hasStaticColumns()
{
return !staticColumns().isEmpty();
}
/**
 * Validates this metadata, throwing a ConfigurationException (via {@link #except})
 * on invalid names, invalid params, counter misuse, or a missing partition key.
 */
public void validate()
{
if (!isNameValid(keyspace))
except("Keyspace name must not be empty, more than %s characters long, or contain non-alphanumeric-underscore characters (got \"%s\")", SchemaConstants.NAME_LENGTH, keyspace);
if (!isNameValid(name))
except("Table name must not be empty, more than %s characters long, or contain non-alphanumeric-underscore characters (got \"%s\")", SchemaConstants.NAME_LENGTH, name);
params.validate();
if (partitionKeyColumns.stream().anyMatch(c -> c.type.isCounter()))
except("PRIMARY KEY columns cannot contain counters");
// Mixing counter with non counter columns is not supported (#2614)
if (isCounter())
{
// The empty-named super-column map is exempt; see isSuperColumnMapColumnName.
for (ColumnMetadata column : regularAndStaticColumns)
if (!(column.type.isCounter()) && !isSuperColumnMapColumnName(column.name))
except("Cannot have a non counter column (\"%s\") in a counter table", column.name);
}
else
{
for (ColumnMetadata column : regularAndStaticColumns)
if (column.type.isCounter())
except("Cannot have a counter column (\"%s\") in a non counter column table", column.name);
}
// All tables should have a partition key
if (partitionKeyColumns.isEmpty())
except("Missing partition keys for table %s", toString());
indexes.validate(this);
}
/**
* To support backward compatibility with thrift super columns in the C* 3.0+ storage engine, we encode said super
* columns as a CQL {@code map<blob, blob>}. To ensure the name of this map did not conflict with any other user
* defined columns, we used the empty name (which is otherwise not allowed for user created columns).
* <p>
* While all thrift-based tables must have been converted to "CQL" ones with "DROP COMPACT STORAGE" (before
* upgrading to C* 4.0, which stop supporting non-CQL tables completely), a converted super-column table will still
* have this map with an empty name. And the reason we need to recognize it still, is that for backward
* compatibility we need to support counters in values of this map while it's not supported in any other map.
*
* TODO: it's probably worth lifting the limitation of not allowing counters as map values. It works fully
* internally (since we had to support it for this special map) and doesn't feel particularly dangerous to
* support. Doing so would remove this special case, but would also let user that do have an upgraded super-column
* table with counters to rename that weirdly name map to something more meaningful (it's not possible today
* as after renaming the validation in {@link #validate()} would trigger).
*/
private static boolean isSuperColumnMapColumnName(ColumnIdentifier columnName)
{
// The super-column map is the only column with an empty (zero-length) name.
return !columnName.bytes.hasRemaining();
}
/**
 * Validates that this metadata is a compatible successor of {@code previous}:
 * same identity/flags, and key/column types that are type-compatible with the old
 * ones (so existing sstables remain readable). Throws via {@link #except} otherwise.
 */
void validateCompatibility(TableMetadata previous)
{
// Index tables are validated through their base table.
if (isIndex())
return;
if (!previous.keyspace.equals(keyspace))
except("Keyspace mismatch (found %s; expected %s)", keyspace, previous.keyspace);
if (!previous.name.equals(name))
except("Table mismatch (found %s; expected %s)", name, previous.name);
if (!previous.id.equals(id))
except("Table ID mismatch (found %s; expected %s)", id, previous.id);
if (!previous.flags.equals(flags))
except("Table type mismatch (found %s; expected %s)", flags, previous.flags);
if (previous.partitionKeyColumns.size() != partitionKeyColumns.size())
{
except("Partition keys of different length (found %s; expected %s)",
partitionKeyColumns.size(),
previous.partitionKeyColumns.size());
}
// Each key component's new type must be able to read values written with the old type.
for (int i = 0; i < partitionKeyColumns.size(); i++)
{
if (!partitionKeyColumns.get(i).type.isCompatibleWith(previous.partitionKeyColumns.get(i).type))
{
except("Partition key column mismatch (found %s; expected %s)",
partitionKeyColumns.get(i).type,
previous.partitionKeyColumns.get(i).type);
}
}
if (previous.clusteringColumns.size() != clusteringColumns.size())
{
except("Clustering columns of different length (found %s; expected %s)",
clusteringColumns.size(),
previous.clusteringColumns.size());
}
for (int i = 0; i < clusteringColumns.size(); i++)
{
if (!clusteringColumns.get(i).type.isCompatibleWith(previous.clusteringColumns.get(i).type))
{
except("Clustering column mismatch (found %s; expected %s)",
clusteringColumns.get(i).type,
previous.clusteringColumns.get(i).type);
}
}
// Regular/static columns may be added or dropped, but a surviving column's type must stay compatible.
for (ColumnMetadata previousColumn : previous.regularAndStaticColumns)
{
ColumnMetadata column = getColumn(previousColumn.name);
if (column != null && !column.type.isCompatibleWith(previousColumn.type))
except("Column mismatch (found %s; expected %s)", column, previousColumn);
}
}
/** A comparator over the partition key column types, treating the partition key as a clustering. */
public ClusteringComparator partitionKeyAsClusteringComparator()
{
return new ClusteringComparator(partitionKeyColumns.stream().map(c -> c.type).collect(toList()));
}
/**
* Generate a table name for an index corresponding to the given column.
* This is NOT the same as the index's name! This is only used in sstable filenames and is not exposed to users.
*
* @param info A definition of the column with index
*
* @return name of the index table
*/
public String indexTableName(IndexMetadata info)
{
// TODO simplify this when info.index_name is guaranteed to be set
return name + Directories.SECONDARY_INDEX_NAME_SEPARATOR + info.name;
}
/**
* @return true if the change as made impacts queries/updates on the table,
* e.g. any columns or indexes were added, removed, or altered; otherwise, false is returned.
* Used to determine whether prepared statements against this table need to be re-prepared.
*/
boolean changeAffectsPreparedStatements(TableMetadata updated)
{
return !partitionKeyColumns.equals(updated.partitionKeyColumns)
|| !clusteringColumns.equals(updated.clusteringColumns)
|| !regularAndStaticColumns.equals(updated.regularAndStaticColumns)
|| !indexes.equals(updated.indexes)
|| params.defaultTimeToLive != updated.params.defaultTimeToLive
|| params.gcGraceSeconds != updated.params.gcGraceSeconds;
}
/**
* There is a couple of places in the code where we need a TableMetadata object and don't have one readily available
* and know that only the keyspace and name matter. This creates such "fake" metadata. Use only if you know what
* you're doing.
*/
public static TableMetadata minimal(String keyspace, String name)
{
// A single blob partition key column satisfies the "all tables have a partition key" invariant.
return TableMetadata.builder(keyspace, name)
.addPartitionKeyColumn("key", BytesType.instance)
.build();
}
/**
 * Derives this index table's params from its base table's params: gc_grace_seconds is
 * forced to 0 and the cache setting follows the base table's key-cache setting.
 */
public TableMetadata updateIndexTableMetadata(TableParams baseTableParams)
{
TableParams.Builder builder = baseTableParams.unbuild().gcGraceSeconds(0);
// Depends on parent's cache setting, turn on its index table's cache.
// Row caching is never enabled; see CASSANDRA-5732
builder.caching(baseTableParams.caching.cacheKeys() ? CachingParams.CACHE_KEYS : CachingParams.CACHE_NOTHING);
return unbuild().params(builder.build()).build();
}
/** Whether any column of this table references the user-defined type with the given name. */
boolean referencesUserType(ByteBuffer name)
{
return any(columns(), c -> c.type.referencesUserType(name));
}
/**
 * Returns a copy of this metadata with every column type updated to reflect the
 * (altered) definition of {@code udt}; returns {@code this} if the type is unused.
 */
public TableMetadata withUpdatedUserType(UserType udt)
{
if (!referencesUserType(udt.name))
return this;
Builder builder = unbuild();
columns().forEach(c -> builder.alterColumnType(c.name, c.type.withUpdatedUserType(udt)));
return builder.build();
}
// Throws a ConfigurationException prefixed with "keyspace.table: " for validation failures.
private void except(String format, Object... args)
{
throw new ConfigurationException(keyspace + "." + name + ": " + format(format, args));
}
@Override
public boolean equals(Object o)
{
if (this == o)
return true;
if (!(o instanceof TableMetadata))
return false;
TableMetadata tm = (TableMetadata) o;
return equalsWithoutColumns(tm) && columns.equals(tm.columns);
}
// Equality on everything except the columns map; used by both equals() and compare().
private boolean equalsWithoutColumns(TableMetadata tm)
{
return keyspace.equals(tm.keyspace)
&& name.equals(tm.name)
&& id.equals(tm.id)
&& partitioner.equals(tm.partitioner)
&& kind == tm.kind
&& params.equals(tm.params)
&& flags.equals(tm.flags)
&& droppedColumns.equals(tm.droppedColumns)
&& indexes.equals(tm.indexes)
&& triggers.equals(tm.triggers);
}
/**
 * Compares this metadata with {@code other}: empty if identical, SHALLOW on any
 * non-column difference, otherwise whatever the column comparison yields.
 */
Optional<Difference> compare(TableMetadata other)
{
return equalsWithoutColumns(other)
? compareColumns(other.columns)
: Optional.of(Difference.SHALLOW);
}
/**
 * Compares this table's columns against the given map. A differing set of
 * column names, or any single shallowly-different column, yields SHALLOW;
 * otherwise DEEP if at least one column differs deeply; otherwise empty.
 */
private Optional<Difference> compareColumns(Map<ByteBuffer, ColumnMetadata> other)
{
    if (!columns.keySet().equals(other.keySet()))
        return Optional.of(Difference.SHALLOW);

    boolean deep = false;
    for (Map.Entry<ByteBuffer, ColumnMetadata> entry : columns.entrySet())
    {
        ColumnMetadata ours = entry.getValue();
        ColumnMetadata theirs = other.get(entry.getKey());

        Optional<Difference> difference = ours.compare(theirs);
        if (!difference.isPresent())
            continue;

        // A shallow column difference wins immediately; deep ones are only
        // reported if no shallow one is ever found.
        if (difference.get() == Difference.SHALLOW)
            return difference;
        else if (difference.get() == Difference.DEEP)
            deep = true;
    }

    return deep ? Optional.of(Difference.DEEP) : Optional.empty();
}
@Override
public int hashCode()
{
    // Must stay consistent with equals(): hashes exactly the fields equals() compares.
    return Objects.hash(keyspace, name, id, partitioner, kind, params, flags, columns, droppedColumns, indexes, triggers);
}
@Override
public String toString()
{
    // Renders as "<keyspace>.<table>", quoting either part when necessary.
    String ks = ColumnIdentifier.maybeQuote(keyspace);
    String table = ColumnIdentifier.maybeQuote(name);
    return format("%s.%s", ks, table);
}
/** Verbose multi-field dump for logs and debugging; toString() stays terse. */
public String toDebugString()
{
    MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);
    helper.add("keyspace", keyspace);
    helper.add("table", name);
    helper.add("id", id);
    helper.add("partitioner", partitioner);
    helper.add("kind", kind);
    helper.add("params", params);
    helper.add("flags", flags);
    helper.add("columns", columns());
    helper.add("droppedColumns", droppedColumns.values());
    helper.add("indexes", indexes);
    helper.add("triggers", triggers);
    return helper.toString();
}
/**
 * Mutable companion used to assemble an immutable TableMetadata.
 * Obtained via TableMetadata.builder(...) or unbuild() on an existing table.
 */
public static final class Builder
{
    final String keyspace;
    final String name;
    // Generated lazily in build() when not explicitly supplied.
    private TableId id;
    // Defaults to the globally configured partitioner in build() when not supplied.
    private IPartitioner partitioner;
    private Kind kind = Kind.REGULAR;
    private TableParams.Builder params = TableParams.builder();

    // See the comment on Flag.COMPOUND definition for why we (still) unconditionally add this flag.
    private Set<Flag> flags = EnumSet.of(Flag.COMPOUND);
    private Triggers triggers = Triggers.none();
    private Indexes indexes = Indexes.none();

    private final Map<ByteBuffer, DroppedColumn> droppedColumns = new HashMap<>();

    // 'columns' indexes every column by name; the three lists below bucket them
    // by kind (partition key and clustering lists are kept sorted by position).
    private final Map<ByteBuffer, ColumnMetadata> columns = new HashMap<>();
    private final List<ColumnMetadata> partitionKeyColumns = new ArrayList<>();
    private final List<ColumnMetadata> clusteringColumns = new ArrayList<>();
    private final List<ColumnMetadata> regularAndStaticColumns = new ArrayList<>();

    private Builder(String keyspace, String name, TableId id)
    {
        this.keyspace = keyspace;
        this.name = name;
        this.id = id;
    }

    private Builder(String keyspace, String name)
    {
        this.keyspace = keyspace;
        this.name = name;
    }

    public TableMetadata build()
    {
        // Fill in defaults that could not be chosen at construction time.
        if (partitioner == null)
            partitioner = DatabaseDescriptor.getPartitioner();

        if (id == null)
            id = TableId.generate();

        return new TableMetadata(this);
    }

    public Builder id(TableId val)
    {
        id = val;
        return this;
    }

    public Builder partitioner(IPartitioner val)
    {
        partitioner = val;
        return this;
    }

    public Builder kind(Kind val)
    {
        kind = val;
        return this;
    }

    // Replaces all table params at once (the setters below tweak them one at a time).
    public Builder params(TableParams val)
    {
        params = val.unbuild();
        return this;
    }

    public Builder bloomFilterFpChance(double val)
    {
        params.bloomFilterFpChance(val);
        return this;
    }

    public Builder caching(CachingParams val)
    {
        params.caching(val);
        return this;
    }

    public Builder comment(String val)
    {
        params.comment(val);
        return this;
    }

    public Builder compaction(CompactionParams val)
    {
        params.compaction(val);
        return this;
    }

    public Builder compression(CompressionParams val)
    {
        params.compression(val);
        return this;
    }

    public Builder defaultTimeToLive(int val)
    {
        params.defaultTimeToLive(val);
        return this;
    }

    public Builder gcGraceSeconds(int val)
    {
        params.gcGraceSeconds(val);
        return this;
    }

    public Builder maxIndexInterval(int val)
    {
        params.maxIndexInterval(val);
        return this;
    }

    public Builder memtableFlushPeriod(int val)
    {
        params.memtableFlushPeriodInMs(val);
        return this;
    }

    public Builder minIndexInterval(int val)
    {
        params.minIndexInterval(val);
        return this;
    }

    public Builder crcCheckChance(double val)
    {
        params.crcCheckChance(val);
        return this;
    }

    public Builder speculativeRetry(SpeculativeRetryPolicy val)
    {
        params.speculativeRetry(val);
        return this;
    }

    public Builder additionalWritePolicy(SpeculativeRetryPolicy val)
    {
        params.additionalWritePolicy(val);
        return this;
    }

    public Builder extensions(Map<String, ByteBuffer> val)
    {
        params.extensions(val);
        return this;
    }

    // NOTE(review): the supplied set is stored as-is (not copied); later calls to
    // flag()/isCounter() will mutate it. Callers should pass a private EnumSet.
    public Builder flags(Set<Flag> val)
    {
        flags = val;
        return this;
    }

    public Builder isCounter(boolean val)
    {
        return flag(Flag.COUNTER, val);
    }

    private Builder flag(Flag flag, boolean set)
    {
        if (set) flags.add(flag); else flags.remove(flag);
        return this;
    }

    public Builder triggers(Triggers val)
    {
        triggers = val;
        return this;
    }

    public Builder indexes(Indexes val)
    {
        indexes = val;
        return this;
    }

    public Builder addPartitionKeyColumn(String name, AbstractType type)
    {
        return addPartitionKeyColumn(ColumnIdentifier.getInterned(name, false), type);
    }

    // The column's position within the partition key is its insertion order.
    public Builder addPartitionKeyColumn(ColumnIdentifier name, AbstractType type)
    {
        return addColumn(new ColumnMetadata(keyspace, this.name, name, type, partitionKeyColumns.size(), ColumnMetadata.Kind.PARTITION_KEY));
    }

    public Builder addClusteringColumn(String name, AbstractType type)
    {
        return addClusteringColumn(ColumnIdentifier.getInterned(name, false), type);
    }

    // The column's position within the clustering is its insertion order.
    public Builder addClusteringColumn(ColumnIdentifier name, AbstractType type)
    {
        return addColumn(new ColumnMetadata(keyspace, this.name, name, type, clusteringColumns.size(), ColumnMetadata.Kind.CLUSTERING));
    }

    public Builder addRegularColumn(String name, AbstractType type)
    {
        return addRegularColumn(ColumnIdentifier.getInterned(name, false), type);
    }

    public Builder addRegularColumn(ColumnIdentifier name, AbstractType type)
    {
        return addColumn(new ColumnMetadata(keyspace, this.name, name, type, ColumnMetadata.NO_POSITION, ColumnMetadata.Kind.REGULAR));
    }

    public Builder addStaticColumn(String name, AbstractType type)
    {
        return addStaticColumn(ColumnIdentifier.getInterned(name, false), type);
    }

    public Builder addStaticColumn(ColumnIdentifier name, AbstractType type)
    {
        return addColumn(new ColumnMetadata(keyspace, this.name, name, type, ColumnMetadata.NO_POSITION, ColumnMetadata.Kind.STATIC));
    }

    /**
     * Registers a column, bucketing it by kind.
     *
     * @throws IllegalArgumentException if a column with the same name already exists.
     */
    public Builder addColumn(ColumnMetadata column)
    {
        if (columns.containsKey(column.name.bytes))
            throw new IllegalArgumentException();

        switch (column.kind)
        {
            case PARTITION_KEY:
                partitionKeyColumns.add(column);
                // Keep partition key columns ordered by their position.
                Collections.sort(partitionKeyColumns);
                break;
            case CLUSTERING:
                // Clustering columns must have a comparable (sortable) type.
                column.type.checkComparable();
                clusteringColumns.add(column);
                Collections.sort(clusteringColumns);
                break;
            default:
                regularAndStaticColumns.add(column);
        }

        columns.put(column.name.bytes, column);

        return this;
    }

    public Builder addColumns(Iterable<ColumnMetadata> columns)
    {
        columns.forEach(this::addColumn);
        return this;
    }

    // Replaces (not merges) the recorded dropped columns.
    public Builder droppedColumns(Map<ByteBuffer, DroppedColumn> droppedColumns)
    {
        this.droppedColumns.clear();
        this.droppedColumns.putAll(droppedColumns);
        return this;
    }

    /**
     * Records a deprecated column for a system table.
     */
    public Builder recordDeprecatedSystemColumn(String name, AbstractType<?> type)
    {
        // As we play fast and loose with the removal timestamp, make sure this is not misused for a non-system table.
        assert SchemaConstants.isLocalSystemKeyspace(keyspace);
        recordColumnDrop(ColumnMetadata.regularColumn(keyspace, this.name, name, type), Long.MAX_VALUE);
        return this;
    }

    // The dropped column is stored with its user types expanded (see expandUserTypes).
    public Builder recordColumnDrop(ColumnMetadata column, long timeMicros)
    {
        droppedColumns.put(column.name.bytes, new DroppedColumn(column.withNewType(column.type.expandUserTypes()), timeMicros));
        return this;
    }

    public Iterable<ColumnMetadata> columns()
    {
        return columns.values();
    }

    public Set<String> columnNames()
    {
        return columns.values().stream().map(c -> c.name.toString()).collect(toSet());
    }

    public ColumnMetadata getColumn(ColumnIdentifier identifier)
    {
        return columns.get(identifier.bytes);
    }

    public ColumnMetadata getColumn(ByteBuffer name)
    {
        return columns.get(name);
    }

    public boolean hasRegularColumns()
    {
        return regularAndStaticColumns.stream().anyMatch(ColumnMetadata::isRegular);
    }

    /*
     * The following methods all assume a Builder with valid set of partition key, clustering, regular and static columns.
     */

    /**
     * Removes a regular or static column.
     *
     * @throws IllegalArgumentException if the column doesn't exist or is part of the primary key.
     */
    public Builder removeRegularOrStaticColumn(ColumnIdentifier identifier)
    {
        ColumnMetadata column = columns.get(identifier.bytes);
        if (column == null || column.isPrimaryKeyColumn())
            throw new IllegalArgumentException();

        columns.remove(identifier.bytes);
        regularAndStaticColumns.remove(column);
        return this;
    }

    /**
     * Renames a primary key (partition key or clustering) column in place.
     *
     * @throws IllegalArgumentException if the target name is taken, or the source
     * column doesn't exist or isn't part of the primary key.
     */
    public Builder renamePrimaryKeyColumn(ColumnIdentifier from, ColumnIdentifier to)
    {
        if (columns.containsKey(to.bytes))
            throw new IllegalArgumentException();

        ColumnMetadata column = columns.get(from.bytes);
        if (column == null || !column.isPrimaryKeyColumn())
            throw new IllegalArgumentException();

        ColumnMetadata newColumn = column.withNewName(to);
        if (column.isPartitionKey())
            partitionKeyColumns.set(column.position(), newColumn);
        else
            clusteringColumns.set(column.position(), newColumn);

        columns.remove(from.bytes);
        columns.put(to.bytes, newColumn);
        return this;
    }

    /**
     * Replaces a column's type, updating whichever kind-specific list holds it.
     *
     * @throws IllegalArgumentException if no column with that name exists.
     */
    Builder alterColumnType(ColumnIdentifier name, AbstractType<?> type)
    {
        ColumnMetadata column = columns.get(name.bytes);
        if (column == null)
            throw new IllegalArgumentException();

        ColumnMetadata newColumn = column.withNewType(type);

        switch (column.kind)
        {
            case PARTITION_KEY:
                partitionKeyColumns.set(column.position(), newColumn);
                break;
            case CLUSTERING:
                clusteringColumns.set(column.position(), newColumn);
                break;
            case REGULAR:
            case STATIC:
                regularAndStaticColumns.remove(column);
                regularAndStaticColumns.add(newColumn);
                break;
        }

        columns.put(column.name.bytes, newColumn);

        return this;
    }
}
/**
 * A table with strict liveness filters/ignores rows that carry no primary key
 * liveness info, effectively tying a row's liveness to that of its primary key.
 *
 * Currently this is only used by views with a normal base column as PK column,
 * so that updates to other columns do not make the row live when the base
 * column is not live. See CASSANDRA-11500.
 *
 * TODO: does not belong here, should be gone
 */
public boolean enforceStrictLiveness()
{
    // Only views can enforce strict liveness; delegate to the view definition.
    if (!isView())
        return false;
    return Keyspace.open(keyspace).viewManager.getByName(name).enforceStrictLiveness();
}
/**
 * Returns the names of all the user types referenced by this table.
 *
 * @return the names of all the user types referenced by this table, in an order
 * where any UDT appears after the UDTs it depends on.
 */
public Set<ByteBuffer> getReferencedUserTypes()
{
    Set<ByteBuffer> referenced = new LinkedHashSet<>();
    for (ColumnMetadata column : columns())
        addUserTypes(column.type, referenced);
    return referenced;
}
/**
 * Finds all user types used by the specified type and adds them to the set.
 *
 * @param type the type to check for user types.
 * @param types the set of UDT names to which to add new user types found in
 * {@code type}. Insertion ordering matters: if a user type A uses another user
 * type B, then B is added before A.
 */
private static void addUserTypes(AbstractType<?> type, Set<ByteBuffer> types)
{
    // Depth-first: visit subtypes before the type itself, so that a UDT's
    // dependencies always precede it in the set's iteration order.
    for (AbstractType<?> subType : type.subTypes())
        addUserTypes(subType, types);

    if (type.isUDT())
        types.add(((UserType) type).name);
}
@Override
public SchemaElementType elementType()
{
    // This schema element is a table (as opposed to a keyspace, type, view, ...).
    return SchemaElementType.TABLE;
}
@Override
public String elementKeyspace()
{
    // The keyspace this table belongs to.
    return keyspace;
}
@Override
public String elementName()
{
    // The table's (unquoted) name.
    return name;
}
@Override
public String toCqlString(boolean withInternals)
{
    // "withInternals" doubles as both the dropped-columns flag and the
    // internals flag; IF NOT EXISTS is never emitted from this overload.
    return toCqlString(withInternals, withInternals, false);
}
/** Renders this table as a CQL CREATE TABLE statement with explicit flags. */
public String toCqlString(boolean includeDroppedColumns,
                          boolean internals,
                          boolean ifNotExists)
{
    CqlBuilder target = new CqlBuilder(2048);
    appendCqlTo(target, includeDroppedColumns, internals, ifNotExists);
    return target.toString();
}
/**
 * Appends the CQL CREATE TABLE statement for this table to the given builder.
 *
 * @param includeDroppedColumns also replay dropped columns as ALTER statements.
 * @param internals pin internal details (e.g. the table id) in the output.
 * @param ifNotExists emit IF NOT EXISTS.
 */
public void appendCqlTo(CqlBuilder builder,
                        boolean includeDroppedColumns,
                        boolean internals,
                        boolean ifNotExists)
{
    // Views are rendered elsewhere; this path is for tables only.
    assert !isView();

    String createKeyword = "CREATE";
    if (isVirtual())
    {
        // Virtual tables cannot be recreated by users, so the whole statement
        // is wrapped in a comment block and labelled VIRTUAL instead of CREATE.
        builder.append(String.format("/*\n" +
                                     "Warning: Table %s is a virtual table and cannot be recreated with CQL.\n" +
                                     "Structure, for reference:\n",
                                     toString()));
        createKeyword = "VIRTUAL";
    }

    builder.append(createKeyword)
           .append(" TABLE ");
    if (ifNotExists)
        builder.append("IF NOT EXISTS ");

    builder.append(toString())
           .append(" (")
           .newLine()
           .increaseIndent();

    // With a single-column primary key the PK is declared inline on the column
    // itself; otherwise a separate PRIMARY KEY (...) clause is emitted.
    boolean hasSingleColumnPrimaryKey = partitionKeyColumns.size() == 1 && clusteringColumns.isEmpty();

    appendColumnDefinitions(builder, includeDroppedColumns, hasSingleColumnPrimaryKey);

    if (!hasSingleColumnPrimaryKey)
        appendPrimaryKey(builder);

    builder.decreaseIndent()
           .append(')');

    appendTableOptions(builder, internals);

    builder.decreaseIndent();

    if (isVirtual())
    {
        // Close the comment block opened above for virtual tables.
        builder.newLine()
               .append("*/");
    }

    if (includeDroppedColumns)
        appendDropColumns(builder);
}
/**
 * Appends the column definition lines of a CREATE TABLE statement, one column
 * per line, followed (when requested) by the definitions of dropped columns.
 */
private void appendColumnDefinitions(CqlBuilder builder,
                                     boolean includeDroppedColumns,
                                     boolean hasSingleColumnPrimaryKey)
{
    Iterator<ColumnMetadata> iter = allColumnsInCreateOrder();
    while (iter.hasNext())
    {
        ColumnMetadata column = iter.next();

        // If the column has been re-added after a drop, we don't include it right away. Instead, we'll add the
        // dropped one first below, then we'll issue the DROP and then the actual ADD for this column, thus
        // simulating the proper sequence of events.
        if (includeDroppedColumns && droppedColumns.containsKey(column.name.bytes))
            continue;

        column.appendCqlTo(builder);

        if (hasSingleColumnPrimaryKey && column.isPartitionKey())
            builder.append(" PRIMARY KEY");

        // A trailing comma is needed unless this is the very last definition
        // line: more live columns follow, dropped columns will follow, or a
        // PRIMARY KEY clause will follow.
        if (!hasSingleColumnPrimaryKey || (includeDroppedColumns && !droppedColumns.isEmpty()) || iter.hasNext())
            builder.append(',');

        builder.newLine();
    }

    if (includeDroppedColumns)
    {
        Iterator<DroppedColumn> iterDropped = droppedColumns.values().iterator();
        while (iterDropped.hasNext())
        {
            DroppedColumn dropped = iterDropped.next();
            dropped.column.appendCqlTo(builder);

            // Fix: the comma check must use THIS iterator. The previous code
            // tested the exhausted 'iter' (always false), which omitted the
            // required commas between successive dropped columns whenever the
            // table had a single-column primary key, producing invalid CQL.
            // When a PRIMARY KEY clause follows, every line needs a comma.
            if (!hasSingleColumnPrimaryKey || iterDropped.hasNext())
                builder.append(',');

            builder.newLine();
        }
    }
}
/** Appends the "PRIMARY KEY ((pk...), ck...)" clause of a CREATE TABLE statement. */
void appendPrimaryKey(CqlBuilder builder)
{
    List<ColumnMetadata> partitionKeys = partitionKeyColumns();
    List<ColumnMetadata> clustering = clusteringColumns();

    builder.append("PRIMARY KEY (");

    // A composite partition key gets its own inner parentheses.
    if (partitionKeys.size() > 1)
    {
        builder.append('(')
               .appendWithSeparators(partitionKeys, (b, c) -> b.append(c.name), ", ")
               .append(')');
    }
    else
    {
        builder.append(partitionKeys.get(0).name);
    }

    if (!clustering.isEmpty())
        builder.append(", ")
               .appendWithSeparators(clustering, (b, c) -> b.append(c.name), ", ");

    builder.append(')')
           .newLine();
}
/**
 * Appends the "WITH ..." options clause (and closing semicolon) of a CREATE
 * TABLE statement. When {@code internals} is set, the table id is pinned so the
 * statement recreates the exact same table.
 */
void appendTableOptions(CqlBuilder builder, boolean internals)
{
    builder.append(" WITH ")
           .increaseIndent();

    if (internals)
        builder.append("ID = ")
               .append(id.toString())
               .newLine()
               .append("AND ");

    List<ColumnMetadata> clusteringColumns = clusteringColumns();
    if (!clusteringColumns.isEmpty())
    {
        builder.append("CLUSTERING ORDER BY (")
               .appendWithSeparators(clusteringColumns, (b, c) -> c.appendNameAndOrderTo(b), ", ")
               .append(')')
               .newLine()
               .append("AND ");
    }

    // Virtual tables only expose the comment option; regular tables emit the
    // full parameter list.
    if (isVirtual())
    {
        builder.append("comment = ").appendWithSingleQuotes(params.comment);
    }
    else
    {
        params.appendCqlTo(builder);
    }
    builder.append(";");
}
/**
 * Replays dropped columns as ALTER TABLE statements: a DROP carrying the
 * original drop timestamp and, when a column of the same name exists again, a
 * subsequent ADD — reproducing the historical drop/re-add sequence.
 */
private void appendDropColumns(CqlBuilder builder)
{
    for (Entry<ByteBuffer, DroppedColumn> entry : droppedColumns.entrySet())
    {
        DroppedColumn dropped = entry.getValue();
        builder.newLine()
               .append("ALTER TABLE ")
               .append(toString())
               .append(" DROP ")
               .append(dropped.column.name)
               .append(" USING TIMESTAMP ")
               .append(dropped.droppedTime)
               .append(';');

        // A live column with the same name means it was re-added after the drop.
        ColumnMetadata column = getColumn(entry.getKey());
        if (column != null)
        {
            builder.newLine()
                   .append("ALTER TABLE ")
                   .append(toString())
                   .append(" ADD ");
            column.appendCqlTo(builder);
            builder.append(';');
        }
    }
}
}
| |
/*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service;
import com.rits.cloning.Cloner;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterial;
import com.thoughtworks.go.config.materials.mercurial.HgMaterial;
import com.thoughtworks.go.config.materials.mercurial.HgMaterialConfig;
import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry;
import com.thoughtworks.go.domain.*;
import com.thoughtworks.go.domain.activity.AgentAssignment;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.domain.builder.Builder;
import com.thoughtworks.go.domain.builder.FetchArtifactBuilder;
import com.thoughtworks.go.domain.materials.Material;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.svn.Subversion;
import com.thoughtworks.go.domain.materials.svn.SvnCommand;
import com.thoughtworks.go.fixture.PipelineWithTwoStages;
import com.thoughtworks.go.helper.AgentMother;
import com.thoughtworks.go.helper.SvnTestRepo;
import com.thoughtworks.go.helper.TestRepo;
import com.thoughtworks.go.remote.AgentIdentifier;
import com.thoughtworks.go.remote.work.BuildWork;
import com.thoughtworks.go.remote.work.DeniedAgentWork;
import com.thoughtworks.go.remote.work.NoWork;
import com.thoughtworks.go.remote.work.Work;
import com.thoughtworks.go.server.cache.GoCache;
import com.thoughtworks.go.server.dao.DatabaseAccessHelper;
import com.thoughtworks.go.server.dao.JobInstanceDao;
import com.thoughtworks.go.server.dao.PipelineDao;
import com.thoughtworks.go.server.dao.StageDao;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.persistence.MaterialRepository;
import com.thoughtworks.go.server.scheduling.ScheduleHelper;
import com.thoughtworks.go.server.service.builders.BuilderFactory;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.server.transaction.TransactionTemplate;
import com.thoughtworks.go.server.websocket.AgentRemoteHandler;
import com.thoughtworks.go.server.websocket.AgentStub;
import com.thoughtworks.go.util.*;
import com.thoughtworks.go.utils.SerializationTester;
import com.thoughtworks.go.websocket.Action;
import com.thoughtworks.go.websocket.Message;
import com.thoughtworks.go.websocket.MessageEncoding;
import org.hamcrest.Matchers;
import org.junit.*;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Semaphore;
import static com.thoughtworks.go.helper.ModificationsMother.modifyNoFiles;
import static com.thoughtworks.go.helper.ModificationsMother.modifySomeFiles;
import static com.thoughtworks.go.util.GoConstants.DEFAULT_APPROVED_BY;
import static com.thoughtworks.go.util.TestUtils.sleepQuietly;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.hamcrest.core.IsNot.not;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:WEB-INF/applicationContext-acegi-security.xml"
})
public class BuildAssignmentServiceIntegrationTest {
// Spring-wired services under test and their collaborators.
@Autowired private BuildAssignmentService buildAssignmentService;
@Autowired private GoConfigService goConfigService;
@Autowired private GoConfigDao goConfigDao;
@Autowired private PipelineDao pipelineDao;
@Autowired private JobInstanceDao jobInstanceDao;
@Autowired private AgentService agentService;
@Autowired private AgentAssignment agentAssignment;
@Autowired private ScheduleService scheduleService;
@Autowired private MaterialRepository materialRepository;
@Autowired private DatabaseAccessHelper dbHelper;
@Autowired private ScheduleHelper scheduleHelper;
@Autowired private GoCache goCache;
@Autowired private StageDao stageDao;
@Autowired private JobInstanceService jobInstanceService;
@Autowired private PipelineService pipelineService;
@Autowired private EnvironmentConfigService environmentConfigService;
@Autowired private TimeProvider timeProvider;
@Autowired private TransactionTemplate transactionTemplate;
@Autowired private BuilderFactory builderFactory;
@Autowired private InstanceFactory instanceFactory;
@Autowired private AgentRemoteHandler agentRemoteHandler;
@Autowired private PipelineConfigService pipelineConfigService;
@Autowired private EntityHashingService entityHashingService;
@Autowired private ElasticAgentPluginService elasticAgentPluginService;

// Per-test fixtures, (re)built in setUp(); testRepo is class-wide (see @BeforeClass).
private PipelineConfig evolveConfig;
private static final String STAGE_NAME = "dev";
private GoConfigFileHelper configHelper;
private ScheduleTestUtil u;
public Subversion repository;
public static TestRepo testRepo;
private PipelineWithTwoStages fixture;
private String md5 = "md5-test";
private Username loserUser = new Username(new CaseInsensitiveString("loser"));
private AgentStub agent;
private ConfigCache configCache;
private ConfigElementImplementationRegistry registry;
@BeforeClass
public static void setupRepos() throws IOException {
    // One shared SVN repository for the whole class; torn down in @AfterClass.
    testRepo = new SvnTestRepo("testSvnRepo");
}
@AfterClass
public static void tearDownConfigFileLocation() throws IOException {
    // Deletes the class-wide SVN repository created in setupRepos().
    TestRepo.internalTearDown();
}
@Before
public void setUp() throws Exception {
    // Fresh config and database state for each test: three pipelines ("evolve",
    // "anotherPipeline", "thirdPipeline") against the shared SVN repo, plus a
    // two-stage fixture pipeline and a stub websocket agent.
    configCache = new ConfigCache();
    registry = ConfigElementImplementationRegistryMother.withNoPlugins();
    configHelper = new GoConfigFileHelper().usingCruiseConfigDao(goConfigDao);
    configHelper.onSetUp();
    dbHelper.onSetUp();
    fixture = new PipelineWithTwoStages(materialRepository, transactionTemplate);
    fixture.usingConfigHelper(configHelper).usingDbHelper(dbHelper).onSetUp();
    repository = new SvnCommand(null, testRepo.projectRepositoryUrl());
    evolveConfig = configHelper.addPipeline("evolve", STAGE_NAME, repository, "unit");
    configHelper.addPipeline("anotherPipeline", STAGE_NAME, repository, "anotherTest");
    configHelper.addPipeline("thirdPipeline", STAGE_NAME, repository, "yetAnotherTest");
    goConfigService.forceNotifyListeners();
    goCache.clear();
    u = new ScheduleTestUtil(transactionTemplate, materialRepository, dbHelper, configHelper);
    agent = new AgentStub();
}
@After
public void teardown() throws Exception {
    // Undo everything setUp() created so tests remain independent.
    goCache.clear();
    agentService.clearAll();
    fixture.onTearDown();
    dbHelper.onTearDown();
    configHelper.onTearDown();
    FileUtil.deleteFolder(goConfigService.artifactsDir());
    agentAssignment.clear();
    agentRemoteHandler.connectedAgents().clear();
}
@Test
public void shouldRescheduleAbandonedBuild() throws SQLException {
    // When the same agent asks for work again while already holding a build,
    // the first job must be rescheduled and its original instance ignored.
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig), new DefaultSchedulingContext(
            DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    buildAssignmentService.onTimer();
    buildAssignmentService.assignWorkToAgent(instance);
    long firstAssignedBuildId = buildOf(pipeline).getId();
    //somehow agent abandoned its original build...
    buildAssignmentService.assignWorkToAgent(instance);
    JobInstance reloaded = jobInstanceDao.buildByIdWithTransitions(firstAssignedBuildId);
    assertThat(reloaded.getState(), is(JobState.Rescheduled));
    assertThat(reloaded.isIgnored(), is(true));
}
@Test
public void shouldNotAssignWorkToDeniedAgent() throws Exception {
    // A disabled agent must receive DeniedAgentWork, never a real assignment.
    AgentConfig deniedAgentConfig = AgentMother.localAgent();
    deniedAgentConfig.disable();

    Work work = buildAssignmentService.assignWorkToAgent(agent(deniedAgentConfig));
    assertThat(work, instanceOf(DeniedAgentWork.class));
}
@Test
public void shouldNotAssignWorkWhenPipelineScheduledWithStaleMaterials() {
    // The scheduled instance still references the original SVN material; after
    // the pipeline is re-added with an hg material instead, its job must not be
    // handed out.
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig), new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    evolveConfig.setMaterialConfigs(new MaterialConfigs(new HgMaterialConfig("foo", null)));
    configHelper.removePipeline(CaseInsensitiveString.str(evolveConfig.name()));
    configHelper.addPipeline(evolveConfig);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    JobInstance job = buildOf(pipeline);
    jobInstanceDao.updateStateAndResult(job);
    assertThat(buildAssignmentService.assignWorkToAgent(instance), is((Work) BuildAssignmentService.NO_WORK));
}
@Test
public void shouldNotAssignCancelledJob() throws Exception {
    // A job cancelled after scheduling must never be handed to an agent.
    AgentIdentifier instance = agent(AgentMother.localAgent());
    Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, modifyNoFiles(evolveConfig), new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(pipeline);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    JobInstance job = buildOf(pipeline);
    job.cancel();
    jobInstanceDao.updateStateAndResult(job);
    assertThat(buildAssignmentService.assignWorkToAgent(instance), is((Work) BuildAssignmentService.NO_WORK));
}
@Test
public void shouldUpdateNumberOfActiveRemoteAgentsAfterAssigned() {
    // After real work is assigned, the agent instance must report as building.
    AgentConfig agentConfig = AgentMother.remoteAgent();
    configHelper.addAgent(agentConfig);
    fixture.createPipelineWithFirstStageScheduled();
    buildAssignmentService.onTimer();
    AgentInstance agent = agentService.findAgent(agentConfig.getUuid());
    assertFalse(agent.isBuilding());
    Work work = buildAssignmentService.assignWorkToAgent(agent(agentConfig));
    assertThat(work, instanceOf(BuildWork.class));
    assertTrue(agent.isBuilding());
}
@Test
public void shouldCancelOutOfDateBuilds() throws Exception {
    // Removing a scheduled stage from the config must cancel its pending job.
    fixture.createPipelineWithFirstStageScheduled();
    buildAssignmentService.onTimer();
    configHelper.removeStage(fixture.pipelineName, fixture.devStage);
    buildAssignmentService.onConfigChange(goConfigService.getCurrentConfig());
    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    assertThat(job.getState(), is(JobState.Completed));
    assertThat(job.getResult(), is(JobResult.Cancelled));
}
@Test
public void shouldCancelBuildsForDeletedStagesWhenPipelineConfigChanges() throws Exception {
    // Same as shouldCancelOutOfDateBuilds, but the stage is removed through the
    // pipeline-config update API (md5-guarded) rather than the config helper.
    buildAssignmentService.initialize();
    fixture.createPipelineWithFirstStageScheduled();
    buildAssignmentService.onTimer();
    PipelineConfig pipelineConfig = new Cloner().deepClone(configHelper.getCachedGoConfig().currentConfig().getPipelineConfigByName(new CaseInsensitiveString(fixture.pipelineName)));
    String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
    String md5 = CachedDigestUtils.md5Hex(xml);
    StageConfig devStage = pipelineConfig.findBy(new CaseInsensitiveString(fixture.devStage));
    pipelineConfig.remove(devStage);
    pipelineConfigService.updatePipelineConfig(loserUser, pipelineConfig, md5, new HttpLocalizedOperationResult());
    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    assertThat(job.getState(), is(JobState.Completed));
    assertThat(job.getResult(), is(JobResult.Cancelled));
}
@Test
public void shouldCancelBuildsForDeletedJobsWhenPipelineConfigChanges() throws Exception {
    // Deleting one of two jobs via the pipeline-config API must cancel only the
    // deleted job's instance; the remaining job stays scheduled.
    buildAssignmentService.initialize();
    fixture = new PipelineWithTwoStages(materialRepository, transactionTemplate).usingTwoJobs();
    fixture.usingConfigHelper(configHelper).usingDbHelper(dbHelper).onSetUp();
    fixture.createPipelineWithFirstStageScheduled();
    buildAssignmentService.onTimer();
    PipelineConfig pipelineConfig = new Cloner().deepClone(configHelper.getCachedGoConfig().currentConfig().getPipelineConfigByName(new CaseInsensitiveString(fixture.pipelineName)));
    String xml = new MagicalGoConfigXmlWriter(configCache, registry).toXmlPartial(pipelineConfig);
    String md5 = CachedDigestUtils.md5Hex(xml);
    StageConfig devStage = pipelineConfig.findBy(new CaseInsensitiveString(fixture.devStage));
    devStage.getJobs().remove(devStage.jobConfigByConfigName(new CaseInsensitiveString(fixture.JOB_FOR_DEV_STAGE)));
    pipelineConfigService.updatePipelineConfig(loserUser, pipelineConfig, md5, new HttpLocalizedOperationResult());
    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance deletedJob = pipeline.getFirstStage().getJobInstances().getByName(fixture.JOB_FOR_DEV_STAGE);
    assertThat(deletedJob.getState(), is(JobState.Completed));
    assertThat(deletedJob.getResult(), is(JobResult.Cancelled));
    JobInstance retainedJob = pipeline.getFirstStage().getJobInstances().getByName(fixture.DEV_STAGE_SECOND_JOB);
    assertThat(retainedJob.getState(), is(JobState.Scheduled));
    assertThat(retainedJob.getResult(), is(JobResult.Unknown));
}
@Test
public void shouldCancelBuildBelongingToNonExistentPipeline() throws Exception {
    // An assignment attempt against a pipeline removed from config must return
    // NO_WORK and cancel the orphaned job and its stage.
    fixture.createPipelineWithFirstStageScheduled();
    buildAssignmentService.onTimer();
    configHelper.removePipeline(fixture.pipelineName);
    AgentConfig agentConfig = AgentMother.localAgent();
    agentConfig.addResource(new Resource("some-other-resource"));
    assertThat((NoWork) buildAssignmentService.assignWorkToAgent(agent(agentConfig)), Matchers.is(BuildAssignmentService.NO_WORK));
    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    assertThat(job.getState(), is(JobState.Completed));
    assertThat(job.getResult(), is(JobResult.Cancelled));
    Stage stage = stageDao.findStageWithIdentifier(job.getIdentifier().getStageIdentifier());
    assertThat(stage.getState(), is(StageState.Cancelled));
    assertThat(stage.getResult(), is(StageResult.Cancelled));
}
@Test
public void shouldNotReloadScheduledJobPlansWhenAgentWorkAssignmentIsInProgress() throws Exception {
    // Verifies (via a semaphore-coordinated background thread) that onTimer()
    // does not reload scheduled job plans while an agent's work assignment is
    // mid-flight: orderedScheduledBuilds() must have been called exactly once
    // by the time the assignment reaches the pipeline loader.
    fixture.createPipelineWithFirstStageScheduled();
    Pipeline pipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance job = pipeline.getFirstStage().getJobInstances().first();
    final JobInstanceService mockJobInstanceService = mock(JobInstanceService.class);
    final Pipeline pipeline1 = pipeline;
    final Semaphore sem = new Semaphore(1);
    sem.acquire();
    when(mockJobInstanceService.orderedScheduledBuilds()).thenReturn(jobInstanceService.orderedScheduledBuilds());
    when(mockJobInstanceService.buildByIdWithTransitions(job.getId())).thenReturn(jobInstanceService.buildByIdWithTransitions(job.getId()));
    // Loader stand-in: releases the semaphore (letting the main thread call
    // onTimer() concurrently) and asserts the reload count before returning.
    ScheduledPipelineLoader scheduledPipelineLoader = new ScheduledPipelineLoader(null, null, null, null, null, null, null, null) {
        @Override
        public Pipeline pipelineWithPasswordAwareBuildCauseByBuildId(long buildId) {
            sem.release();
            sleepQuietly(1000);
            verify(mockJobInstanceService, times(1)).orderedScheduledBuilds();
            return pipeline1;
        }
    };
    final BuildAssignmentService buildAssignmentServiceUnderTest = new BuildAssignmentService(goConfigService, mockJobInstanceService, scheduleService,
            agentService, environmentConfigService, transactionTemplate, scheduledPipelineLoader, pipelineService, builderFactory, agentRemoteHandler, elasticAgentPluginService, timeProvider);
    // Any throwable raised on the assignment thread is captured and re-asserted here.
    final Throwable[] fromThread = new Throwable[1];
    buildAssignmentServiceUnderTest.onTimer();
    Thread assigner = new Thread(new Runnable() {
        public void run() {
            try {
                final AgentConfig agentConfig = AgentMother.localAgentWithResources("some-other-resource");
                buildAssignmentServiceUnderTest.assignWorkToAgent(agent(agentConfig));
            } catch (Throwable e) {
                e.printStackTrace();
                fromThread[0] = e;
            } finally {
            }
        }
    }, "assignmentThread");
    assigner.start();
    sem.acquire();
    buildAssignmentServiceUnderTest.onTimer();
    assigner.join();
    assertThat(fromThread[0], is(nullValue()));
}
@Test
public void shouldCancelBuildBelongingToNonExistentPipelineWhenCreatingWork() throws Exception {
    fixture.createPipelineWithFirstStageScheduled();
    Pipeline scheduledPipeline = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    long scheduledJobId = scheduledPipeline.getFirstStage().getJobInstances().first().getId();

    // Loader behaves as though the pipeline vanished mid-assignment.
    ScheduledPipelineLoader failingLoader = mock(ScheduledPipelineLoader.class);
    when(failingLoader.pipelineWithPasswordAwareBuildCauseByBuildId(scheduledJobId)).thenThrow(
            new PipelineNotFoundException("thrown by mockPipelineService"));

    // Serve a config from which the pipeline has been removed.
    GoConfigService goConfigServiceMock = mock(GoConfigService.class);
    CruiseConfig configWithoutPipeline = configHelper.currentConfig();
    configHelper.removePipeline(fixture.pipelineName, configWithoutPipeline);
    when(goConfigServiceMock.getCurrentConfig()).thenReturn(configWithoutPipeline);

    buildAssignmentService = new BuildAssignmentService(goConfigServiceMock, jobInstanceService, scheduleService, agentService, environmentConfigService,
            transactionTemplate, failingLoader, pipelineService, builderFactory, agentRemoteHandler, elasticAgentPluginService, timeProvider);
    buildAssignmentService.onTimer();

    AgentConfig localAgent = AgentMother.localAgent();
    localAgent.addResource(new Resource("some-other-resource"));
    try {
        buildAssignmentService.assignWorkToAgent(agent(localAgent));
        fail("should have thrown PipelineNotFoundException");
    } catch (PipelineNotFoundException e) {
        // expected
    }

    // The orphaned job and its stage must both end up cancelled.
    Pipeline reloaded = pipelineDao.mostRecentPipeline(fixture.pipelineName);
    JobInstance cancelledJob = reloaded.getFirstStage().getJobInstances().first();
    assertThat(cancelledJob.getState(), is(JobState.Completed));
    assertThat(cancelledJob.getResult(), is(JobResult.Cancelled));
    Stage cancelledStage = stageDao.findStageWithIdentifier(cancelledJob.getIdentifier().getStageIdentifier());
    assertThat(cancelledStage.getState(), is(StageState.Cancelled));
    assertThat(cancelledStage.getResult(), is(StageResult.Cancelled));
}
@Test
public void shouldBeAbleToSerializeAndDeserializeBuildWork() throws Exception {
    // One scheduled pipeline => exactly one piece of work to hand out.
    Pipeline scheduled = instanceFactory.createPipelineInstance(evolveConfig, modifySomeFiles(evolveConfig), new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(scheduled);
    buildAssignmentService.onTimer();

    BuildWork original = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgent()));
    BuildWork roundTripped = (BuildWork) SerializationTester.serializeAndDeserialize(original);

    // The assignment must survive the serialize/deserialize round trip intact.
    assertThat(roundTripped.getAssignment().materialRevisions(), is(original.getAssignment().materialRevisions()));
    assertThat(roundTripped.getAssignment(), is(original.getAssignment()));
    assertThat(roundTripped, is(original));
}
@Test
public void shouldCreateWorkWithFetchMaterialsFlagFromStageConfig() throws Exception {
    // Flip the stage-level flag before scheduling; it must flow into the assignment.
    evolveConfig.getFirstStageConfig().setFetchMaterials(true);
    Pipeline scheduled = instanceFactory.createPipelineInstance(evolveConfig, modifySomeFiles(evolveConfig), new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
    dbHelper.savePipelineWithStagesAndMaterials(scheduled);
    buildAssignmentService.onTimer();
    BuildWork assigned = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgent()));
    assertThat("should have set fetchMaterials on assignment", assigned.getAssignment().getPlan().shouldFetchMaterials(), is(true));
}
/**
* (uppest/2/uppest-stage/1)
* |------------------> upper-peer -------
* | ...................................|...............................................
* | . | .
* [ uppest-stage ............................|...................... {bar.zip uppest/upper-peer/downer}
* V .
* uppest uppest-stage-2 ------> upper ------> downer ------> downest {foo.zip uppest/upper/downer}
* (uppest/1/uppest-stage-2/1)
* uppest-stage-3 ]
* <p/>
* .... :: fetch artifact call
* ---> :: material dependency
*/
@Test
public void shouldCreateWork_withAncestorFetchArtifactCalls_resolvedToRelevantStage() throws Exception {
// --- Config: 'uppest' has three stages; 'upper' and 'upper-peer' depend on different uppest stages. ---
configHelper.addPipeline("uppest", "uppest-stage");
configHelper.addStageToPipeline("uppest", "uppest-stage-2");
PipelineConfig uppest = configHelper.addStageToPipeline("uppest", "uppest-stage-3");
configHelper.addPipeline("upper", "upper-stage");
DependencyMaterial upper_sMaterial = new DependencyMaterial(new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage-2"));
PipelineConfig upper = configHelper.setMaterialConfigForPipeline("upper", upper_sMaterial.config());
configHelper.addPipeline("upper-peer", "upper-peer-stage");
DependencyMaterial upperPeer_sMaterial = new DependencyMaterial(new CaseInsensitiveString("uppest"), new CaseInsensitiveString("uppest-stage"));
PipelineConfig upperPeer = configHelper.setMaterialConfigForPipeline("upper-peer", upperPeer_sMaterial.config());
// --- 'downer' depends on both upper and upper-peer; 'downest' depends on downer. ---
configHelper.addPipeline("downer", "downer-stage");
DependencyMaterial downer_sUpperMaterial = new DependencyMaterial(new CaseInsensitiveString("upper"), new CaseInsensitiveString("upper-stage"));
configHelper.setMaterialConfigForPipeline("downer", downer_sUpperMaterial.config());
DependencyMaterial downer_sUpperPeerMaterial = new DependencyMaterial(new CaseInsensitiveString("upper-peer"), new CaseInsensitiveString("upper-peer-stage"));
PipelineConfig downer = configHelper.addMaterialToPipeline("downer", downer_sUpperPeerMaterial.config());
configHelper.addPipeline("downest", "downest-stage");
DependencyMaterial downest_sMaterial = new DependencyMaterial(new CaseInsensitiveString("downer"), new CaseInsensitiveString("downer-stage"));
configHelper.setMaterialConfigForPipeline("downest", downest_sMaterial.config());
// --- downest's only job fetches from ancestor 'uppest' through two different ancestry paths. ---
Tasks allFetchTasks = new Tasks();
allFetchTasks.add(new FetchTask(new CaseInsensitiveString("uppest/upper/downer"), new CaseInsensitiveString("uppest-stage"), new CaseInsensitiveString("unit"), "foo.zip", "bar"));
allFetchTasks.add(new FetchTask(new CaseInsensitiveString("uppest/upper-peer/downer"), new CaseInsensitiveString("uppest-stage"), new CaseInsensitiveString("unit"), "bar.zip", "baz"));
configHelper.replaceAllJobsInStage("downest", "downest-stage", new JobConfig(new CaseInsensitiveString("fetcher"), new Resources("fetcher"), new ArtifactPlans(), allFetchTasks));
PipelineConfig downest = goConfigService.getCurrentConfig().pipelineConfigByName(new CaseInsensitiveString("downest"));
// --- Run 'uppest' twice: instance 1 feeds the 'upper' path, instance 2 the 'upper-peer' path. ---
DefaultSchedulingContext defaultSchedulingCtx = new DefaultSchedulingContext(DEFAULT_APPROVED_BY);
Pipeline uppestInstanceForUpper = instanceFactory.createPipelineInstance(uppest, modifySomeFiles(uppest), defaultSchedulingCtx, md5, new TimeProvider());
dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpper);
dbHelper.passStage(uppestInstanceForUpper.findStage("uppest-stage"));
Stage upper_sMaterialStage = dbHelper.scheduleStage(uppestInstanceForUpper, uppest.getStage(new CaseInsensitiveString("uppest-stage-2")));
dbHelper.passStage(upper_sMaterialStage);
Pipeline uppestInstanceForUpperPeer = instanceFactory.createPipelineInstance(uppest, modifySomeFiles(uppest), new DefaultSchedulingContext("super-hero"), md5, new TimeProvider());
dbHelper.savePipelineWithStagesAndMaterials(uppestInstanceForUpperPeer);
Stage upperPeer_sMaterialStage = uppestInstanceForUpperPeer.findStage("uppest-stage");
dbHelper.passStage(upperPeer_sMaterialStage);
// --- Cascade instances down the dependency chain to 'downest'. ---
Pipeline upperInstance = instanceFactory.createPipelineInstance(upper, buildCauseForDependency(upper_sMaterial, upper_sMaterialStage), defaultSchedulingCtx, md5, new TimeProvider());
dbHelper.savePipelineWithStagesAndMaterials(upperInstance);
Stage downer_sUpperMaterialStage = upperInstance.findStage("upper-stage");
dbHelper.passStage(downer_sUpperMaterialStage);
Pipeline upperPeerInstance = instanceFactory.createPipelineInstance(upperPeer, buildCauseForDependency(upperPeer_sMaterial, upperPeer_sMaterialStage), defaultSchedulingCtx, md5, new TimeProvider());
dbHelper.savePipelineWithStagesAndMaterials(upperPeerInstance);
Stage downer_sUpperPeerMaterialStage = upperPeerInstance.findStage("upper-peer-stage");
dbHelper.passStage(downer_sUpperPeerMaterialStage);
MaterialRevisions downer_sMaterialRevisions = new MaterialRevisions(
materialRevisionForDownstream(downer_sUpperMaterial, downer_sUpperMaterialStage),
materialRevisionForDownstream(downer_sUpperPeerMaterial, downer_sUpperPeerMaterialStage));
Pipeline downerInstance = instanceFactory.createPipelineInstance(downer, BuildCause.createManualForced(downer_sMaterialRevisions, loserUser), defaultSchedulingCtx, md5, new TimeProvider());
dbHelper.savePipelineWithStagesAndMaterials(downerInstance);
Stage downest_sMaterialStage = downerInstance.findStage("downer-stage");
dbHelper.passStage(downest_sMaterialStage);
Pipeline downestInstance = instanceFactory.createPipelineInstance(downest, buildCauseForDependency(downest_sMaterial, downest_sMaterialStage), defaultSchedulingCtx, md5, new TimeProvider());
dbHelper.savePipelineWithStagesAndMaterials(downestInstance);
// --- Assign the 'fetcher' job and check each fetch resolved to the right uppest instance/stage. ---
buildAssignmentService.onTimer();
AgentConfig agentConfig = AgentMother.localAgent();
agentConfig.addResource(new Resource("fetcher"));
BuildWork work = (BuildWork) buildAssignmentService.assignWorkToAgent(agent(agentConfig));
List<Builder> builders = work.getAssignment().getBuilders();
// foo.zip arrives via uppest/upper/downer -> resolved to uppest counter 1.
FetchArtifactBuilder fooZipFetch = (FetchArtifactBuilder) builders.get(0);
assertThat(fooZipFetch.artifactLocator(), is("uppest/1/uppest-stage/latest/unit/foo.zip"));
// bar.zip arrives via uppest/upper-peer/downer -> resolved to uppest counter 2, stage counter 1.
FetchArtifactBuilder barZipFetch = (FetchArtifactBuilder) builders.get(1);
assertThat(barZipFetch.artifactLocator(), is("uppest/2/uppest-stage/1/unit/bar.zip"));
}
private BuildCause buildCauseForDependency(DependencyMaterial material, Stage upstreamStage) {
    // Manual-forced build cause carrying a single dependency-material revision.
    MaterialRevision revision = materialRevisionForDownstream(material, upstreamStage);
    return BuildCause.createManualForced(new MaterialRevisions(revision), loserUser);
}
private MaterialRevision materialRevisionForDownstream(DependencyMaterial material, Stage upstreamStage) {
    // Dependency-material modification: revision = stage locator, label = pipeline label.
    StageIdentifier identifier = upstreamStage.getIdentifier();
    Modification modification = new Modification(new Date(), identifier.getStageLocator(), identifier.getPipelineLabel(), upstreamStage.getPipelineId());
    return new MaterialRevision(material, modification);
}
private AgentIdentifier agent(AgentConfig agentConfig) {
    // Register + approve the agent, then hand back its identifier.
    String uuid = agentConfig.getUuid();
    agentService.sync(new Agents(agentConfig));
    agentService.approve(uuid);
    return agentService.findAgent(uuid).getAgentIdentifier();
}
@Test
public void shouldNotScheduleIfAgentDoesNotHaveResources() throws Exception {
    // Job demands a resource; the plain local agent declares none.
    JobConfig unitJobConfig = evolveConfig.findBy(new CaseInsensitiveString(STAGE_NAME)).jobConfigByInstanceName("unit", true);
    unitJobConfig.addResource("some-resource");
    scheduleHelper.schedule(evolveConfig, modifySomeFiles(evolveConfig), DEFAULT_APPROVED_BY);

    Work assigned = buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgent()));

    // No work handed out; the job stays scheduled and unassigned.
    Pipeline latest = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
    JobInstance unitJob = latest.findStage(STAGE_NAME).findJob("unit");
    assertThat(assigned, is((Work) BuildAssignmentService.NO_WORK));
    assertThat(unitJob.getState(), is(JobState.Scheduled));
    assertThat(unitJob.getAgentUuid(), is(nullValue()));
}
@Test
public void shouldNotScheduleIfAgentDoesNotHaveMatchingResources() throws Exception {
    // Job demands one resource; the agent advertises a different one.
    JobConfig unitJobConfig = evolveConfig.findBy(new CaseInsensitiveString(STAGE_NAME)).jobConfigByInstanceName("unit", true);
    unitJobConfig.addResource("some-resource");
    scheduleHelper.schedule(evolveConfig, modifySomeFiles(evolveConfig), DEFAULT_APPROVED_BY);

    AgentConfig mismatchedAgent = AgentMother.localAgent();
    mismatchedAgent.addResource(new Resource("some-other-resource"));
    Work assigned = buildAssignmentService.assignWorkToAgent(agent(mismatchedAgent));
    assertThat(assigned, is((Work) BuildAssignmentService.NO_WORK));

    // The job stays scheduled and unassigned.
    Pipeline latest = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
    JobInstance unitJob = latest.findStage(STAGE_NAME).findJob("unit");
    assertThat(unitJob.getState(), is(JobState.Scheduled));
    assertThat(unitJob.getAgentUuid(), is(nullValue()));
}
@Test
public void shouldScheduleIfAgentMatchingResources() throws Exception {
    // Job demands a resource and the agent advertises exactly that resource.
    JobConfig unitJobConfig = evolveConfig.findBy(new CaseInsensitiveString(STAGE_NAME)).jobConfigByInstanceName("unit", true);
    unitJobConfig.addResource("some-resource");
    scheduleHelper.schedule(evolveConfig, modifySomeFiles(evolveConfig), DEFAULT_APPROVED_BY);

    AgentConfig matchingAgent = AgentMother.localAgent();
    matchingAgent.addResource(new Resource("some-resource"));
    buildAssignmentService.onTimer();
    Work assigned = buildAssignmentService.assignWorkToAgent(agent(matchingAgent));
    assertThat(assigned, is(not((Work) BuildAssignmentService.NO_WORK)));

    // The job is now assigned to this agent and the persisted plan kept its resources.
    Pipeline latest = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
    JobInstance unitJob = latest.findStage(STAGE_NAME).findJob("unit");
    JobPlan persistedPlan = jobInstanceDao.loadPlan(unitJob.getId());
    assertThat(persistedPlan.getResources(), is((List<Resource>) unitJobConfig.resources()));
    assertThat(unitJob.getState(), is(JobState.Assigned));
    assertThat(unitJob.getAgentUuid(), is(matchingAgent.getUuid()));
}
@Test
public void shouldReScheduleToCorrectAgent() throws Exception {
// Job requires "some-resource"; schedule it.
JobConfig plan = evolveConfig.findBy(new CaseInsensitiveString(STAGE_NAME)).jobConfigByInstanceName("unit", true);
plan.addResource("some-resource");
scheduleHelper.schedule(evolveConfig, modifySomeFiles(evolveConfig), DEFAULT_APPROVED_BY);
buildAssignmentService.onTimer();
// An agent with the matching resource picks the job up.
AgentConfig agentConfig = AgentMother.localAgent();
agentConfig.addResource(new Resource("some-resource"));
Work work = buildAssignmentService.assignWorkToAgent(agent(agentConfig));
assertThat(work, is(not((Work) BuildAssignmentService.NO_WORK)));
Pipeline pipeline = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
JobInstance job = pipeline.findStage(STAGE_NAME).findJob("unit");
// Reschedule the running job: a brand-new job instance (new id) is created.
JobInstance runningJob = jobInstanceDao.buildByIdWithTransitions(job.getId());
scheduleService.rescheduleJob(runningJob);
pipeline = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(evolveConfig.name()));
JobInstance rescheduledJob = pipeline.findStage(STAGE_NAME).findJob("unit");
assertThat(rescheduledJob.getId(), not(runningJob.getId()));
// An agent without the required resource must still get nothing...
buildAssignmentService.onTimer();
Work noResourcesWork = buildAssignmentService.assignWorkToAgent(agent(AgentMother.localAgentWithResources("WITHOUT_RESOURCES")));
assertThat(noResourcesWork, is((Work) BuildAssignmentService.NO_WORK));
// ...while the originally-matching agent receives the rescheduled job.
buildAssignmentService.onTimer();
Work correctAgentWork = buildAssignmentService.assignWorkToAgent(agent(agentConfig));
assertThat(correctAgentWork, is(not((Work) BuildAssignmentService.NO_WORK)));
}
@Test
public void shouldRemoveAllJobPlansThatAreNotInConfig() {
// Snapshot the config so restoring it later removes the pipelines added below.
CruiseConfig oldConfig = goConfigService.getCurrentConfig();
// Two new pipelines, each with one instance persisted.
ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("p1", "s1", u.m(new HgMaterial("hg", null)));
Pipeline p1_1 = instanceFactory.createPipelineInstance(p1.config, modifyNoFiles(p1.config), new DefaultSchedulingContext(
DEFAULT_APPROVED_BY), md5, new TimeProvider());
ScheduleTestUtil.AddedPipeline p2 = u.saveConfigWith("p2", "s1", u.m(new HgMaterial("hg", null)));
Pipeline p2_1 = instanceFactory.createPipelineInstance(p2.config, modifyNoFiles(p2.config), new DefaultSchedulingContext(
DEFAULT_APPROVED_BY), md5, new TimeProvider());
dbHelper.savePipelineWithStagesAndMaterials(p1_1);
dbHelper.savePipelineWithStagesAndMaterials(p2_1);
CruiseConfig cruiseConfig = goConfigService.getCurrentConfig();
buildAssignmentService.onConfigChange(cruiseConfig);
buildAssignmentService.onTimer();
// Peek at the service's private "jobPlans" list via reflection: both plans loaded.
List<JobPlan> plans = (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
assertThat(plans.isEmpty(), is(false));
assertThat(plans.size(), is(2));
// Restoring the old config removes both pipelines; their job plans must be dropped.
configHelper.writeConfigFile(oldConfig);
plans = (List<JobPlan>) ReflectionUtil.getField(buildAssignmentService, "jobPlans");
assertThat("Actual size is " + plans.size(), plans.isEmpty(), is(true));
}
@Test
public void shouldCancelAScheduledJobInCaseThePipelineIsRemovedFromTheConfig_SpecificallyAPipelineRenameToADifferentCaseAndStageNameToADifferentName() throws Exception {
Material hgMaterial = new HgMaterial("url", "folder");
String[] hgRevs = new String[]{"h1"};
u.checkinInOrder(hgMaterial, hgRevs);
// Schedule under the original (upper-case) pipeline name / original stage name.
ScheduleTestUtil.AddedPipeline p1 = u.saveConfigWith("PIPELINE_WHICH_WILL_EVENTUALLY_CHANGE_CASE", u.m(hgMaterial));
u.scheduleWith(p1, hgRevs);
// Rename the pipeline (case change only) and its first stage (new name), then schedule again.
ScheduleTestUtil.AddedPipeline renamedPipeline = u.renamePipelineAndFirstStage(p1, "pipeline_which_will_eventually_change_case", "NEW_RANDOM_STAGE_NAME" + UUID.randomUUID());
Pipeline p1_2 = u.scheduleWith(renamedPipeline, hgRevs);
CruiseConfig cruiseConfig = configHelper.load();
buildAssignmentService.onTimer(); // To Reload Job Plans
buildAssignmentService.onConfigChange(cruiseConfig);
// The stage scheduled under the old stage name no longer exists in config and must be cancelled.
Stages allStages = stageDao.findAllStagesFor(p1_2.getName(), p1_2.getCounter());
assertThat(allStages.byName(CaseInsensitiveString.str(p1.config.first().name())).getState(), is(StageState.Cancelled));
}
@Test
public void shouldAssignMatchedJobToAgentsRegisteredInAgentRemoteHandler() {
    AgentConfig agentConfig = AgentMother.remoteAgent();
    configHelper.addAgent(agentConfig);
    fixture.createPipelineWithFirstStageScheduled();
    AgentRuntimeInfo info = AgentRuntimeInfo.fromServer(agentConfig, true, "location", 1000000l, "OS", false);
    info.setCookie("cookie");
    agentRemoteHandler.process(agent, new Message(Action.ping, MessageEncoding.encodeData(info)));
    // Renamed from 'agent' to 'agentInstance': the original local shadowed the
    // AgentStub field 'agent', forcing confusing 'this.agent' accesses below.
    AgentInstance agentInstance = agentService.findAgent(agentConfig.getUuid());
    assertFalse(agentInstance.isBuilding());
    buildAssignmentService.onTimer();
    // Exactly one message pushed to the stub, and it decodes to a BuildWork.
    assertThat(agent.messages.size(), is(1));
    assertThat(MessageEncoding.decodeWork(agent.messages.get(0).getData()), instanceOf(BuildWork.class));
    assertTrue(agentInstance.isBuilding());
}
@Test
public void shouldNotAssignNoWorkToAgentsRegisteredInAgentRemoteHandler() {
    AgentConfig remoteAgent = AgentMother.remoteAgent();
    configHelper.addAgent(remoteAgent);
    // All stages already passed, so there is no buildable job anywhere.
    fixture.createdPipelineWithAllStagesPassed();
    AgentRuntimeInfo runtimeInfo = AgentRuntimeInfo.fromServer(remoteAgent, true, "location", 1000000l, "OS", false);
    runtimeInfo.setCookie("cookie");
    agentRemoteHandler.process(agent, new Message(Action.ping, MessageEncoding.encodeData(runtimeInfo)));
    buildAssignmentService.onTimer();
    // No message of any kind should be pushed at the agent.
    assertThat(agent.messages.size(), is(0));
}
@Test
public void shouldNotAssignDeniedAgentWorkToAgentsRegisteredInAgentRemoteHandler() {
    // A disabled agent must never receive work, even with a job scheduled.
    AgentConfig disabledAgent = AgentMother.remoteAgent();
    disabledAgent.disable();
    configHelper.addAgent(disabledAgent);
    fixture.createPipelineWithFirstStageScheduled();
    AgentRuntimeInfo runtimeInfo = AgentRuntimeInfo.fromServer(disabledAgent, true, "location", 1000000l, "OS", false);
    runtimeInfo.setCookie("cookie");
    agentRemoteHandler.process(agent, new Message(Action.ping, MessageEncoding.encodeData(runtimeInfo)));
    buildAssignmentService.onTimer();
    assertThat(agent.messages.size(), is(0));
}
@Test
public void shouldOnlyAssignWorkToIdleAgentsRegisteredInAgentRemoteHandler() {
    AgentConfig agentConfig = AgentMother.remoteAgent();
    configHelper.addAgent(agentConfig);
    fixture.createPipelineWithFirstStageScheduled();
    AgentRuntimeInfo info = AgentRuntimeInfo.fromServer(agentConfig, true, "location", 1000000l, "OS", false);
    info.setCookie("cookie");
    // Every non-idle status must be skipped by the assigner.
    // (Fixed: the original array listed AgentStatus.Disabled twice.)
    AgentStatus[] statuses = new AgentStatus[] {
            AgentStatus.Building, AgentStatus.Pending,
            AgentStatus.Disabled,
            AgentStatus.LostContact, AgentStatus.Missing
    };
    for (AgentStatus status : statuses) {
        info.setStatus(status);
        // Fresh stub each iteration so message counts do not leak between statuses.
        agent = new AgentStub();
        agentRemoteHandler.process(agent, new Message(Action.ping, MessageEncoding.encodeData(info)));
        buildAssignmentService.onTimer();
        assertThat("Should not assign work when agent status is " + status, agent.messages.size(), is(0));
    }
}
@Test
public void shouldNotAssignWorkToCanceledAgentsRegisteredInAgentRemoteHandler() {
    AgentConfig remoteAgent = AgentMother.remoteAgent();
    configHelper.addAgent(remoteAgent);
    fixture.createPipelineWithFirstStageScheduled();
    AgentRuntimeInfo runtimeInfo = AgentRuntimeInfo.fromServer(remoteAgent, true, "location", 1000000l, "OS", false);
    runtimeInfo.setCookie("cookie");
    agentRemoteHandler.process(agent, new Message(Action.ping, MessageEncoding.encodeData(runtimeInfo)));
    // Cancel the agent before the assignment timer fires.
    AgentInstance registered = agentService.findAgentAndRefreshStatus(runtimeInfo.getUUId());
    registered.cancel();
    buildAssignmentService.onTimer();
    assertThat("Should not assign work when agent status is Canceled", agent.messages.size(), is(0));
}
@Test
public void shouldCallForReregisterIfAgentInstanceIsNotRegistered() {
    AgentConfig remoteAgent = AgentMother.remoteAgent();
    fixture.createPipelineWithFirstStageScheduled();
    AgentRuntimeInfo runtimeInfo = AgentRuntimeInfo.fromServer(remoteAgent, true, "location", 1000000l, "OS", false);
    // Request registration but never approve it — the agent stays unregistered.
    agentService.requestRegistration(new Username("bob"), runtimeInfo);
    assertThat(agentService.findAgent(runtimeInfo.getUUId()).isRegistered(), is(false));
    runtimeInfo.setCookie("cookie");
    agentRemoteHandler.process(agent, new Message(Action.ping, MessageEncoding.encodeData(runtimeInfo)));
    buildAssignmentService.onTimer();
    // An unregistered agent gets exactly one message: reregister.
    assertThat(agent.messages.size(), is(1));
    assertThat(agent.messages.get(0).getAction(), is(Action.reregister));
}
@Test
public void shouldAssignAgentsWhenThereAreAgentsAreDisabledOrNeedReregister() {
fixture.createPipelineWithFirstStageScheduled();
// Agent 1: registered, but its build is cancelled -> must receive nothing.
AgentConfig canceledAgentConfig = AgentMother.remoteAgent();
configHelper.addAgent(canceledAgentConfig);
AgentRuntimeInfo canceledAgentInfo = AgentRuntimeInfo.fromServer(canceledAgentConfig, true, "location", 1000000l, "OS", false);
canceledAgentInfo.setCookie("cookie1");
AgentStub canceledAgent = new AgentStub();
agentRemoteHandler.process(canceledAgent, new Message(Action.ping, MessageEncoding.encodeData(canceledAgentInfo)));
AgentInstance agentInstance = agentService.findAgentAndRefreshStatus(canceledAgentInfo.getUUId());
agentInstance.cancel();
// Agent 2: registration requested but never approved -> must be told to reregister.
AgentConfig needRegisterAgentConfig = AgentMother.remoteAgent();
AgentRuntimeInfo needRegisterAgentInfo = AgentRuntimeInfo.fromServer(needRegisterAgentConfig, true, "location", 1000000l, "OS", false);
agentService.requestRegistration(new Username("bob"), needRegisterAgentInfo);
needRegisterAgentInfo.setCookie("cookie2");
AgentStub needRegisterAgent = new AgentStub();
agentRemoteHandler.process(needRegisterAgent, new Message(Action.ping, MessageEncoding.encodeData(needRegisterAgentInfo)));
// Agent 3 (the field 'agent'): healthy and idle -> should receive the build.
AgentConfig assignedAgent = AgentMother.remoteAgent();
configHelper.addAgent(assignedAgent);
AgentRuntimeInfo assignedAgentInfo = AgentRuntimeInfo.fromServer(assignedAgent, true, "location", 1000000l, "OS", false);
assignedAgentInfo.setCookie("cookie3");
agentRemoteHandler.process(agent, new Message(Action.ping, MessageEncoding.encodeData(assignedAgentInfo)));
buildAssignmentService.onTimer();
// Cancelled agent: nothing. Unregistered agent: one reregister message. Healthy agent: the work.
assertThat(canceledAgent.messages.size(), is(0));
assertThat(needRegisterAgent.messages.size(), is(1));
assertThat(needRegisterAgent.messages.get(0).getAction(), is(Action.reregister));
assertThat(agent.messages.size(), is(1));
assertThat(MessageEncoding.decodeWork(agent.messages.get(0).getData()), instanceOf(BuildWork.class));
}
private JobInstance buildOf(Pipeline pipeline) {
    // First job of the first stage of the given pipeline instance.
    Stage firstStage = pipeline.getStages().first();
    return firstStage.getJobInstances().first();
}
}
| |
package org.apache.helix.alerts;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.HashMap;
import java.util.Map;
import org.apache.helix.HelixDataAccessor;
import org.apache.helix.ZNRecord;
import org.apache.helix.Mocks.MockManager;
import org.apache.helix.PropertyKey.Builder;
import org.apache.helix.alerts.StatsHolder;
import org.apache.helix.alerts.Tuple;
import org.apache.helix.controller.stages.HealthDataCache;
import org.testng.AssertJUnit;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
public class TestArrivingParticipantStats
{
protected static final String CLUSTER_NAME = "TestCluster";
// Mock Helix manager the StatsHolder under test is wired to.
MockManager _helixManager;
// Unit under test; recreated before each test method in setup().
StatsHolder _statsHolder;
@BeforeMethod(groups = { "unitTest" })
public void setup() {
    // Fresh manager and StatsHolder per test so stats never leak between tests.
    _helixManager = new MockManager(CLUSTER_NAME);
    _statsHolder = new StatsHolder(_helixManager, new HealthDataCache());
}
public Map<String, String> getStatFields(String value, String timestamp) {
    // Build the field map shape StatsHolder expects for one incoming stat report.
    Map<String, String> fields = new HashMap<String, String>();
    fields.put(StatsHolder.VALUE_NAME, value);
    fields.put(StatsHolder.TIMESTAMP_NAME, timestamp);
    return fields;
}
public boolean statRecordContains(ZNRecord rec, String statName) {
    // True when the record carries a map-field keyed by the given stat name.
    return rec.getMapFields().containsKey(statName);
}
public boolean statRecordHasValue(ZNRecord rec, String statName, String value) {
    // Look up the stat's fields; guard against the stat being absent from the
    // record (the original dereferenced the map entry unconditionally and NPE'd).
    Map<String, String> statFields = rec.getMapFields().get(statName);
    return statFields != null && value.equals(statFields.get(StatsHolder.VALUE_NAME));
}
public boolean statRecordHasTimestamp(ZNRecord rec, String statName, String timestamp) {
    // Look up the stat's fields; guard against the stat being absent from the
    // record (the original dereferenced the map entry unconditionally and NPE'd).
    Map<String, String> statFields = rec.getMapFields().get(statName);
    return statFields != null && timestamp.equals(statFields.get(StatsHolder.TIMESTAMP_NAME));
}
// Exact matching persistent stat, but has no values yet
@Test(groups = { "unitTest" })
public void testAddFirstParticipantStat() throws Exception {
    // Register an exact-match persistent stat and apply one report for it.
    String persistentStat = "accumulate()(dbFoo.partition10.latency)";
    _statsHolder.addStat(persistentStat);
    _statsHolder.applyStat("dbFoo.partition10.latency", getStatFields("0", "0"));
    _statsHolder.persistStats();
    // Read the persisted record back and check value/timestamp.
    HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
    Builder keyBuilder = accessor.keyBuilder();
    ZNRecord persisted = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
    System.out.println("rec: " + persisted.toString());
    AssertJUnit.assertTrue(statRecordHasValue(persisted, persistentStat, "0.0"));
    AssertJUnit.assertTrue(statRecordHasTimestamp(persisted, persistentStat, "0.0"));
}
// Exact matching persistent stat, but has no values yet
@Test(groups = { "unitTest" })
public void testAddRepeatParticipantStat() throws Exception {
    // Exact-match persistent stat reported twice; accumulate() should combine them.
    String persistentStat = "accumulate()(dbFoo.partition10.latency)";
    _statsHolder.addStat(persistentStat);
    String incomingStatName = "dbFoo.partition10.latency";
    _statsHolder.applyStat(incomingStatName, getStatFields("0", "0"));
    _statsHolder.applyStat(incomingStatName, getStatFields("1", "10"));
    _statsHolder.persistStats();
    // Persisted value reflects both reports; timestamp is from the latest one.
    HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
    Builder keyBuilder = accessor.keyBuilder();
    ZNRecord persisted = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
    System.out.println("rec: " + persisted.toString());
    AssertJUnit.assertTrue(statRecordHasValue(persisted, persistentStat, "1.0"));
    AssertJUnit.assertTrue(statRecordHasTimestamp(persisted, persistentStat, "10.0"));
}
// test to ensure backdated stats not applied
@Test(groups = { "unitTest" })
public void testBackdatedParticipantStat() throws Exception
{
// add a persistent stat
String persistentStat = "accumulate()(dbFoo.partition10.latency)";
_statsHolder.addStat(persistentStat);
// generate incoming stat
String incomingStatName = "dbFoo.partition10.latency";
// apply stat once and then again
Map<String, String> statFields = getStatFields("0", "0");
_statsHolder.applyStat(incomingStatName, statFields);
statFields = getStatFields("1", "10");
_statsHolder.applyStat(incomingStatName, statFields);
statFields = getStatFields("5", "15");
_statsHolder.applyStat(incomingStatName, statFields);
// this report carries timestamp 10, older than the stored 15 — it must be ignored
statFields = getStatFields("1", "10");
_statsHolder.applyStat(incomingStatName, statFields);
_statsHolder.persistStats();
// check persistent stats
HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
Builder keyBuilder = accessor.keyBuilder();
ZNRecord rec = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
System.out.println("rec: " + rec.toString());
// expected 0+1+5 = 6.0; the backdated report did not change value or timestamp
AssertJUnit.assertTrue(statRecordHasValue(rec, persistentStat, "6.0"));
AssertJUnit.assertTrue(statRecordHasTimestamp(rec, persistentStat, "15.0"));
}
// Exact matching persistent stat, but has no values yet
@Test(groups = { "unitTest" })
public void testAddFirstParticipantStatToWildCard() throws Exception {
    // A wildcard stat should materialize a concrete stat for the matching report.
    String persistentWildcardStat = "accumulate()(dbFoo.partition*.latency)";
    _statsHolder.addStat(persistentWildcardStat);
    _statsHolder.applyStat("dbFoo.partition10.latency", getStatFields("0", "0"));
    _statsHolder.persistStats();
    // The persisted record is keyed by the expanded (non-wildcard) stat name.
    HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
    Builder keyBuilder = accessor.keyBuilder();
    ZNRecord persisted = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
    System.out.println("rec: " + persisted.toString());
    String persistentStat = "accumulate()(dbFoo.partition10.latency)";
    AssertJUnit.assertTrue(statRecordHasValue(persisted, persistentStat, "0.0"));
    AssertJUnit.assertTrue(statRecordHasTimestamp(persisted, persistentStat, "0.0"));
}
// test to add 2nd report to same stat
@Test(groups = { "unitTest" })
public void testAddSecondParticipantStatToWildCard() throws Exception {
    // Two reports for the same concrete stat under a wildcard registration.
    String persistentWildcardStat = "accumulate()(dbFoo.partition*.latency)";
    _statsHolder.addStat(persistentWildcardStat);
    String incomingStatName = "dbFoo.partition10.latency";
    _statsHolder.applyStat(incomingStatName, getStatFields("1", "0"));
    _statsHolder.applyStat(incomingStatName, getStatFields("1", "10"));
    _statsHolder.persistStats();
    // Accumulated value 1+1 under the expanded stat name, latest timestamp kept.
    HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
    Builder keyBuilder = accessor.keyBuilder();
    ZNRecord persisted = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
    System.out.println("rec: " + persisted.toString());
    String persistentStat = "accumulate()(dbFoo.partition10.latency)";
    AssertJUnit.assertTrue(statRecordHasValue(persisted, persistentStat, "2.0"));
    AssertJUnit.assertTrue(statRecordHasTimestamp(persisted, persistentStat, "10.0"));
}
// Exact matching persistent stat, but has no values yet
@Test(groups = { "unitTest" })
public void testAddParticipantStatToDoubleWildCard() throws Exception {
    // Two wildcards in the same stat expression must both expand.
    String persistentWildcardStat = "accumulate()(db*.partition*.latency)";
    _statsHolder.addStat(persistentWildcardStat);
    _statsHolder.applyStat("dbFoo.partition10.latency", getStatFields("0", "0"));
    _statsHolder.persistStats();
    // Persisted under the fully-expanded stat name.
    HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
    Builder keyBuilder = accessor.keyBuilder();
    ZNRecord persisted = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
    System.out.println("rec: " + persisted.toString());
    String persistentStat = "accumulate()(dbFoo.partition10.latency)";
    AssertJUnit.assertTrue(statRecordHasValue(persisted, persistentStat, "0.0"));
    AssertJUnit.assertTrue(statRecordHasTimestamp(persisted, persistentStat, "0.0"));
}
@Test(groups = { "unitTest" })
public void testAddWildcardInFirstStatToken() throws Exception {
    // Wildcard in the very first token of the stat expression must also expand.
    String persistentWildcardStat = "accumulate()(instance*.reportingage)";
    _statsHolder.addStat(persistentWildcardStat);
    _statsHolder.applyStat("instance10.reportingage", getStatFields("1", "10"));
    _statsHolder.persistStats();
    // Persisted under the expanded stat name with the reported value/timestamp.
    HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
    Builder keyBuilder = accessor.keyBuilder();
    ZNRecord persisted = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
    System.out.println("rec: " + persisted.toString());
    String persistentStat = "accumulate()(instance10.reportingage)";
    AssertJUnit.assertTrue(statRecordHasValue(persisted, persistentStat, "1.0"));
    AssertJUnit.assertTrue(statRecordHasTimestamp(persisted, persistentStat, "10.0"));
}
// test to add report to same wildcard stat, different actual stat
@Test(groups = { "unitTest" })
public void testAddTwoDistinctParticipantStatsToSameWildCard() throws Exception
{
  // One wildcard stat, two distinct concrete stats reported against it.
  String wildcardStat = "accumulate()(dbFoo.partition*.latency)";
  _statsHolder.addStat(wildcardStat);
  // First concrete stat.
  _statsHolder.applyStat("dbFoo.partition10.latency", getStatFields("1", "10"));
  // Second, distinct concrete stat matching the same wildcard.
  _statsHolder.applyStat("dbFoo.partition11.latency", getStatFields("5", "10"));
  _statsHolder.persistStats();
  // Both expanded stats must be persisted independently.
  HelixDataAccessor dataAccessor = _helixManager.getHelixDataAccessor();
  Builder builder = dataAccessor.keyBuilder();
  ZNRecord record = dataAccessor.getProperty(builder.persistantStat()).getRecord();
  System.out.println("rec: " + record.toString());
  String firstStat = "accumulate()(dbFoo.partition10.latency)";
  AssertJUnit.assertTrue(statRecordHasValue(record, firstStat, "1.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(record, firstStat, "10.0"));
  String secondStat = "accumulate()(dbFoo.partition11.latency)";
  AssertJUnit.assertTrue(statRecordHasValue(record, secondStat, "5.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(record, secondStat, "10.0"));
}
// Window stat: successive reports fill a size-3 window of values/timestamps
@Test(groups = { "unitTest" })
public void testWindowStat() throws Exception
{
  // Register a window(3) stat with a partition wildcard.
  String wildcardStat = "window(3)(dbFoo.partition*.latency)";
  _statsHolder.addStat(wildcardStat);
  String reportedStat = "dbFoo.partition10.latency";
  String expandedStat = "window(3)(dbFoo.partition10.latency)";
  HelixDataAccessor dataAccessor = _helixManager.getHelixDataAccessor();
  Builder builder = dataAccessor.keyBuilder();
  // First report: window holds a single element.
  _statsHolder.applyStat(reportedStat, getStatFields("0", "0"));
  _statsHolder.persistStats();
  ZNRecord record = dataAccessor.getProperty(builder.persistantStat()).getRecord();
  System.out.println("rec: " + record.toString());
  AssertJUnit.assertTrue(statRecordHasValue(record, expandedStat, "0.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(record, expandedStat, "0.0"));
  // Second report: window grows to two elements, oldest first.
  _statsHolder.applyStat(reportedStat, getStatFields("10", "1"));
  _statsHolder.persistStats();
  record = dataAccessor.getProperty(builder.persistantStat()).getRecord();
  System.out.println("rec: " + record.toString());
  AssertJUnit.assertTrue(statRecordHasValue(record, expandedStat, "0.0,10.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(record, expandedStat, "0.0,1.0"));
  // Third report: window reaches its capacity of three.
  _statsHolder.applyStat(reportedStat, getStatFields("20", "2"));
  _statsHolder.persistStats();
  record = dataAccessor.getProperty(builder.persistantStat()).getRecord();
  System.out.println("rec: " + record.toString());
  AssertJUnit.assertTrue(statRecordHasValue(record, expandedStat, "0.0,10.0,20.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(record, expandedStat, "0.0,1.0,2.0"));
}
// Verifies that a full window(3) stat evicts its oldest entry when a
// fourth, newer value arrives.
@Test(groups = { "unitTest" })
public void testWindowStatExpiration() throws Exception
{
  String persistentStat = "window(3)(dbFoo.partition10.latency)";
  // Seed the window with 3 elements (values 0,10,20 at times 0,1,2).
  testWindowStat();
  // Apply a 4th, newer value; the oldest pair (0.0 @ 0.0) must be evicted.
  String incomingStatName = "dbFoo.partition10.latency";
  Map<String, String> statFields = getStatFields("30", "3");
  _statsHolder.applyStat(incomingStatName, statFields);
  _statsHolder.persistStats();
  // Check persistent stats: window slid forward by one element.
  HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
  Builder keyBuilder = accessor.keyBuilder();
  ZNRecord rec = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
  System.out.println("rec: " + rec.toString());
  AssertJUnit.assertTrue(statRecordHasValue(rec, persistentStat, "10.0,20.0,30.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(rec, persistentStat, "1.0,2.0,3.0"));
}
// Verifies that a report whose timestamp is not newer than the window's
// latest entry is ignored (stale data does not perturb the window).
@Test(groups = { "unitTest" })
public void testWindowStatStale() throws Exception
{
  String persistentStat = "window(3)(dbFoo.partition10.latency)";
  // Seed the window with 3 elements (values 0,10,20 at times 0,1,2).
  testWindowStat();
  // Re-apply an old value (time 1 <= latest time 2); it must be dropped.
  String incomingStatName = "dbFoo.partition10.latency";
  Map<String, String> statFields = getStatFields("10", "1");
  _statsHolder.applyStat(incomingStatName, statFields);
  _statsHolder.persistStats();
  // Check persistent stats: window unchanged.
  HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
  Builder keyBuilder = accessor.keyBuilder();
  ZNRecord rec = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
  System.out.println("rec: " + rec.toString());
  AssertJUnit.assertTrue(statRecordHasValue(rec, persistentStat, "0.0,10.0,20.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(rec, persistentStat, "0.0,1.0,2.0"));
}
// test that has 2 agg stats for same raw stat
// Exact matching persistent stat, but has no values yet
@Test(groups = { "unitTest" })
public void testAddStatForTwoAggTypes() throws Exception
{
  // Register two aggregation types for the SAME raw stat, persisting after each.
  String accumulateStat = "accumulate()(dbFoo.partition10.latency)";
  String windowStat = "window(3)(dbFoo.partition10.latency)";
  _statsHolder.addStat(accumulateStat);
  _statsHolder.persistStats();
  _statsHolder.addStat(windowStat);
  _statsHolder.persistStats();
  // Report a single raw stat; both aggregations should receive it.
  _statsHolder.applyStat("dbFoo.partition10.latency", getStatFields("0", "0"));
  _statsHolder.persistStats();
  // Verify both aggregated stats carry the reported value/time.
  HelixDataAccessor dataAccessor = _helixManager.getHelixDataAccessor();
  Builder builder = dataAccessor.keyBuilder();
  ZNRecord record = dataAccessor.getProperty(builder.persistantStat()).getRecord();
  System.out.println("rec: " + record.toString());
  AssertJUnit.assertTrue(statRecordHasValue(record, accumulateStat, "0.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(record, accumulateStat, "0.0"));
  AssertJUnit.assertTrue(statRecordHasValue(record, windowStat, "0.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(record, windowStat, "0.0"));
}
// test merging 2 window stats, new is applied
// Verifies merging of an incoming multi-element window (a Tuple of
// values/times) into an existing window when the incoming data is newer:
// the merged window keeps the 3 most recent pairs.
@Test(groups = { "unitTest" })
public void testMergeTwoWindowsYesMerge() throws Exception
{
  String persistentStat = "window(3)(dbFoo.partition10.latency)";
  String incomingStatName = "dbFoo.partition10.latency";
  // Seed the window with 3 elements (values 0,10,20 at times 0,1,2).
  testWindowStat();
  // Incoming window: (30.0 @ 3.0) and (40.0 @ 4.0) — both newer than the
  // existing latest (time 2.0), so they are merged in.
  Tuple<String> valTuple = new Tuple<String>();
  Tuple<String> timeTuple = new Tuple<String>();
  valTuple.add("30.0");
  valTuple.add("40.0");
  timeTuple.add("3.0");
  timeTuple.add("4.0");
  Map<String, String> statFields =
      getStatFields(valTuple.toString(), timeTuple.toString());
  _statsHolder.applyStat(incomingStatName, statFields);
  _statsHolder.persistStats();
  // Check persistent stats: the two oldest entries were pushed out.
  HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
  Builder keyBuilder = accessor.keyBuilder();
  ZNRecord rec = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
  System.out.println("rec: " + rec.toString());
  AssertJUnit.assertTrue(statRecordHasValue(rec, persistentStat, "20.0,30.0,40.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(rec, persistentStat, "2.0,3.0,4.0"));
}
// test merging 2 window stats, new is ignored
// Verifies that an incoming multi-element window whose data overlaps the
// existing window's time range (starts at a stale timestamp) is ignored
// rather than merged.
@Test(groups = { "unitTest" })
public void testMergeTwoWindowsNoMerge() throws Exception
{
  String persistentStat = "window(3)(dbFoo.partition10.latency)";
  String incomingStatName = "dbFoo.partition10.latency";
  // Seed the window with 3 elements (values 0,10,20 at times 0,1,2).
  testWindowStat();
  // Incoming window starts at time 0.0 — stale relative to the existing
  // window — so the whole incoming tuple must be rejected.
  Tuple<String> valTuple = new Tuple<String>();
  Tuple<String> timeTuple = new Tuple<String>();
  valTuple.add("0.0");
  valTuple.add("40.0");
  timeTuple.add("0.0");
  timeTuple.add("4.0");
  Map<String, String> statFields =
      getStatFields(valTuple.toString(), timeTuple.toString());
  _statsHolder.applyStat(incomingStatName, statFields);
  _statsHolder.persistStats();
  // Check persistent stats: window unchanged.
  HelixDataAccessor accessor = _helixManager.getHelixDataAccessor();
  Builder keyBuilder = accessor.keyBuilder();
  ZNRecord rec = accessor.getProperty(keyBuilder.persistantStat()).getRecord();
  System.out.println("rec: " + rec.toString());
  AssertJUnit.assertTrue(statRecordHasValue(rec, persistentStat, "0.0,10.0,20.0"));
  AssertJUnit.assertTrue(statRecordHasTimestamp(rec, persistentStat, "0.0,1.0,2.0"));
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1alpha1/datastream.proto
package com.google.cloud.datastream.v1alpha1;
/**
*
*
* <pre>
* route list request
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1alpha1.ListRoutesRequest}
*/
public final class ListRoutesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1alpha1.ListRoutesRequest)
ListRoutesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRoutesRequest.newBuilder() to construct.
private ListRoutesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg ctor establishes proto3 defaults: empty strings (pageSize_ defaults to 0).
private ListRoutesRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
orderBy_ = "";
}
// Reflection hook used by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListRoutesRequest();
}
// Fields with tags not known to this message are preserved here for round-tripping.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
// Tags follow the proto encoding (field_number << 3 | wire_type), so
// 10 = field 1 (parent), 16 = field 2 (page_size), 26 = field 3 (page_token),
// 34 = field 4 (filter), 42 = field 5 (order_by).
private ListRoutesRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
// Tag 0 signals end of the message on the wire.
done = true;
break;
case 10:
{
java.lang.String s = input.readStringRequireUtf8();
parent_ = s;
break;
}
case 16:
{
pageSize_ = input.readInt32();
break;
}
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
pageToken_ = s;
break;
}
case 34:
{
java.lang.String s = input.readStringRequireUtf8();
filter_ = s;
break;
}
case 42:
{
java.lang.String s = input.readStringRequireUtf8();
orderBy_ = s;
break;
}
default:
{
// Unrecognized tags are stashed in unknownFields unless they mark EOF.
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
// Always freeze what was parsed so a partially-built message is still usable.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
// Descriptor plumbing: ties this generated class back to the .proto schema
// registered in CloudDatastreamServiceProto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
.internal_static_google_cloud_datastream_v1alpha1_ListRoutesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
.internal_static_google_cloud_datastream_v1alpha1_ListRoutesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1alpha1.ListRoutesRequest.class,
com.google.cloud.datastream.v1alpha1.ListRoutesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a java.lang.String or a ByteString; decoded lazily and cached.
private volatile java.lang.Object parent_;
/**
*
*
* <pre>
* Required. The parent that owns the collection of Routess.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the ByteString once and cache the String back into the field.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent that owns the collection of Routess.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
// Encode the cached String once and cache the ByteString back into the field.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_;
/**
*
*
* <pre>
* Maximum number of Routes to return. The service may return
* fewer than this value. If unspecified, at most 50 Routes
* will be returned. The maximum value is 1000; values above 1000 will be
* coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
// Holds either a java.lang.String or a ByteString; decoded lazily and cached.
private volatile java.lang.Object pageToken_;
/**
*
*
* <pre>
* Page token received from a previous `ListRoutes` call.
* Provide this to retrieve the subsequent page.
* When paginating, all other parameters provided to
* `ListRoutes` must match the call that provided the page
* token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the ByteString once and cache the String back into the field.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Page token received from a previous `ListRoutes` call.
* Provide this to retrieve the subsequent page.
* When paginating, all other parameters provided to
* `ListRoutes` must match the call that provided the page
* token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
// Holds either a java.lang.String or a ByteString; decoded lazily and cached.
private volatile java.lang.Object filter_;
/**
*
*
* <pre>
* Filter request.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Filter request.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ORDER_BY_FIELD_NUMBER = 5;
// Holds either a java.lang.String or a ByteString; decoded lazily and cached.
private volatile java.lang.Object orderBy_;
/**
*
*
* <pre>
* Order by fields for the result.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The orderBy.
*/
@java.lang.Override
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
orderBy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Order by fields for the result.
* </pre>
*
* <code>string order_by = 5;</code>
*
* @return The bytes for orderBy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization check: -1 unknown, 0 false, 1 true. With no
// required fields in this message the check always succeeds.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes only non-default fields (proto3 semantics), then unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 5, orderBy_);
}
unknownFields.writeTo(output);
}
// Computes (and memoizes) the serialized byte size; mirrors writeTo's
// field-presence checks so size and output always agree.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, orderBy_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality over all five fields plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datastream.v1alpha1.ListRoutesRequest)) {
return super.equals(obj);
}
com.google.cloud.datastream.v1alpha1.ListRoutesRequest other =
(com.google.cloud.datastream.v1alpha1.ListRoutesRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getOrderBy().equals(other.getOrderBy())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
// Memoized hash over the same fields as equals(), folding in each field
// number so fields with equal values in different slots hash differently.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
hash = (53 * hash) + getOrderBy().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. All delegate to PARSER.
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length prefix first (for streaming multiple messages).
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datastream.v1alpha1.ListRoutesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Default instance yields a fresh builder; otherwise seed it with this message.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* route list request
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1alpha1.ListRoutesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1alpha1.ListRoutesRequest)
com.google.cloud.datastream.v1alpha1.ListRoutesRequestOrBuilder {
// Builder-side descriptor plumbing mirrors the message's.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
.internal_static_google_cloud_datastream_v1alpha1_ListRoutesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
.internal_static_google_cloud_datastream_v1alpha1_ListRoutesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datastream.v1alpha1.ListRoutesRequest.class,
com.google.cloud.datastream.v1alpha1.ListRoutesRequest.Builder.class);
}
// Construct using com.google.cloud.datastream.v1alpha1.ListRoutesRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No message/repeated fields here, so there are no nested builders to force.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
// Resets every field to its proto3 default.
@java.lang.Override
public Builder clear() {
super.clear();
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
orderBy_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datastream.v1alpha1.CloudDatastreamServiceProto
.internal_static_google_cloud_datastream_v1alpha1_ListRoutesRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.ListRoutesRequest getDefaultInstanceForType() {
return com.google.cloud.datastream.v1alpha1.ListRoutesRequest.getDefaultInstance();
}
// build() enforces isInitialized(); buildPartial() does not.
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.ListRoutesRequest build() {
com.google.cloud.datastream.v1alpha1.ListRoutesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Copies builder state into a new immutable message instance.
@java.lang.Override
public com.google.cloud.datastream.v1alpha1.ListRoutesRequest buildPartial() {
com.google.cloud.datastream.v1alpha1.ListRoutesRequest result =
new com.google.cloud.datastream.v1alpha1.ListRoutesRequest(this);
result.parent_ = parent_;
result.pageSize_ = pageSize_;
result.pageToken_ = pageToken_;
result.filter_ = filter_;
result.orderBy_ = orderBy_;
onBuilt();
return result;
}
// Reflection-based field mutators: straight delegations to the superclass,
// regenerated here so the return type is this concrete Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible, else falls back to the
// reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datastream.v1alpha1.ListRoutesRequest) {
return mergeFrom((com.google.cloud.datastream.v1alpha1.ListRoutesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: proto3 scalar semantics — only non-default fields from
// `other` overwrite this builder's values.
public Builder mergeFrom(com.google.cloud.datastream.v1alpha1.ListRoutesRequest other) {
if (other == com.google.cloud.datastream.v1alpha1.ListRoutesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
onChanged();
}
if (!other.getOrderBy().isEmpty()) {
orderBy_ = other.orderBy_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge: parses a full message then merges it in; on parse
// failure, still merges whatever was successfully read before rethrowing.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.datastream.v1alpha1.ListRoutesRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.datastream.v1alpha1.ListRoutesRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent that owns the collection of Routess.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent that owns the collection of Routess.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent that owns the collection of Routess.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The parent that owns the collection of Routess.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
// Reset to the proto default ("" for string fields), taken from the default instance.
parent_ = getDefaultInstance().getParent();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The parent that owns the collection of Routess.
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for parent to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 string fields must be valid UTF-8; reject invalid bytes before storing.
checkByteStringIsUtf8(value);
parent_ = value;
onChanged();
return this;
}
private int pageSize_;
/**
 *
 *
 * <pre>
 * Maximum number of Routes to return. The service may return
 * fewer than this value. If unspecified, at most 50 Routes
 * will be returned. The maximum value is 1000; values above 1000 will be
 * coerced to 1000.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return The pageSize.
 */
@java.lang.Override
public int getPageSize() {
// 0 means "unspecified"; per the field docs the service then defaults to 50.
return pageSize_;
}
/**
 *
 *
 * <pre>
 * Maximum number of Routes to return. The service may return
 * fewer than this value. If unspecified, at most 50 Routes
 * will be returned. The maximum value is 1000; values above 1000 will be
 * coerced to 1000.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @param value The pageSize to set.
 * @return This builder for chaining.
 */
public Builder setPageSize(int value) {
// No client-side clamping: the service coerces values above 1000 (see field docs).
pageSize_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Maximum number of Routes to return. The service may return
 * fewer than this value. If unspecified, at most 50 Routes
 * will be returned. The maximum value is 1000; values above 1000 will be
 * coerced to 1000.
 * </pre>
 *
 * <code>int32 page_size = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageSize() {
// Reset to the proto default (0 = unspecified).
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
 *
 *
 * <pre>
 * Page token received from a previous `ListRoutes` call.
 * Provide this to retrieve the subsequent page.
 * When paginating, all other parameters provided to
 * `ListRoutes` must match the call that provided the page
 * token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
}
// Field still holds the wire-format ByteString: decode once and cache the
// String so subsequent reads skip the UTF-8 conversion.
java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
pageToken_ = decoded;
return decoded;
}
/**
 *
 *
 * <pre>
 * Page token received from a previous `ListRoutes` call.
 * Provide this to retrieve the subsequent page.
 * When paginating, all other parameters provided to
 * `ListRoutes` must match the call that provided the page
 * token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
// Already stored in ByteString form; return it directly.
return (com.google.protobuf.ByteString) ref;
}
// Encode the cached String once and keep the ByteString for future calls.
com.google.protobuf.ByteString encoded =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = encoded;
return encoded;
}
/**
 *
 *
 * <pre>
 * Page token received from a previous `ListRoutes` call.
 * Provide this to retrieve the subsequent page.
 * When paginating, all other parameters provided to
 * `ListRoutes` must match the call that provided the page
 * token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The pageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
if (value != null) {
pageToken_ = value;
onChanged();
return this;
}
throw new NullPointerException();
}
/**
 *
 *
 * <pre>
 * Page token received from a previous `ListRoutes` call.
 * Provide this to retrieve the subsequent page.
 * When paginating, all other parameters provided to
 * `ListRoutes` must match the call that provided the page
 * token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
// Reset to the proto default ("" for string fields), taken from the default instance.
pageToken_ = getDefaultInstance().getPageToken();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Page token received from a previous `ListRoutes` call.
 * Provide this to retrieve the subsequent page.
 * When paginating, all other parameters provided to
 * `ListRoutes` must match the call that provided the page
 * token.
 * </pre>
 *
 * <code>string page_token = 3;</code>
 *
 * @param value The bytes for pageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 string fields must be valid UTF-8; reject invalid bytes before storing.
checkByteStringIsUtf8(value);
pageToken_ = value;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
 *
 *
 * <pre>
 * Filter request.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The filter.
 */
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
}
// Field still holds the wire-format ByteString: decode once and cache the
// String so subsequent reads skip the UTF-8 conversion.
java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
filter_ = decoded;
return decoded;
}
/**
 *
 *
 * <pre>
 * Filter request.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return The bytes for filter.
 */
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
// Already stored in ByteString form; return it directly.
return (com.google.protobuf.ByteString) ref;
}
// Encode the cached String once and keep the ByteString for future calls.
com.google.protobuf.ByteString encoded =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = encoded;
return encoded;
}
/**
 *
 *
 * <pre>
 * Filter request.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @param value The filter to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setFilter(java.lang.String value) {
if (value != null) {
filter_ = value;
onChanged();
return this;
}
throw new NullPointerException();
}
/**
 *
 *
 * <pre>
 * Filter request.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearFilter() {
// Reset to the proto default ("" for string fields), taken from the default instance.
filter_ = getDefaultInstance().getFilter();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Filter request.
 * </pre>
 *
 * <code>string filter = 4;</code>
 *
 * @param value The bytes for filter to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 string fields must be valid UTF-8; reject invalid bytes before storing.
checkByteStringIsUtf8(value);
filter_ = value;
onChanged();
return this;
}
private java.lang.Object orderBy_ = "";
/**
 *
 *
 * <pre>
 * Order by fields for the result.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return The orderBy.
 */
public java.lang.String getOrderBy() {
java.lang.Object ref = orderBy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
}
// Field still holds the wire-format ByteString: decode once and cache the
// String so subsequent reads skip the UTF-8 conversion.
java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
orderBy_ = decoded;
return decoded;
}
/**
 *
 *
 * <pre>
 * Order by fields for the result.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return The bytes for orderBy.
 */
public com.google.protobuf.ByteString getOrderByBytes() {
java.lang.Object ref = orderBy_;
if (!(ref instanceof java.lang.String)) {
// Already stored in ByteString form; return it directly.
return (com.google.protobuf.ByteString) ref;
}
// Encode the cached String once and keep the ByteString for future calls.
com.google.protobuf.ByteString encoded =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
orderBy_ = encoded;
return encoded;
}
/**
 *
 *
 * <pre>
 * Order by fields for the result.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @param value The orderBy to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setOrderBy(java.lang.String value) {
if (value != null) {
orderBy_ = value;
onChanged();
return this;
}
throw new NullPointerException();
}
/**
 *
 *
 * <pre>
 * Order by fields for the result.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearOrderBy() {
// Reset to the proto default ("" for string fields), taken from the default instance.
orderBy_ = getDefaultInstance().getOrderBy();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Order by fields for the result.
 * </pre>
 *
 * <code>string order_by = 5;</code>
 *
 * @param value The bytes for orderBy to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 string fields must be valid UTF-8; reject invalid bytes before storing.
checkByteStringIsUtf8(value);
orderBy_ = value;
onChanged();
return this;
}
@java.lang.Override
// Replaces the builder's unknown-field set wholesale; delegates to the generated superclass.
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
// Merges the given unknown fields into the builder's existing set; delegates to the superclass.
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1alpha1.ListRoutesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1alpha1.ListRoutesRequest)
// Singleton default instance with every field at its proto default; initialized
// eagerly in the static block below.
private static final com.google.cloud.datastream.v1alpha1.ListRoutesRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datastream.v1alpha1.ListRoutesRequest();
}
// Returns the shared immutable default instance of ListRoutesRequest.
public static com.google.cloud.datastream.v1alpha1.ListRoutesRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser for ListRoutesRequest; parsePartialFrom delegates directly to the
// message's stream-parsing constructor.
private static final com.google.protobuf.Parser<ListRoutesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListRoutesRequest>() {
@java.lang.Override
public ListRoutesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ListRoutesRequest(input, extensionRegistry);
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<ListRoutesRequest> parser() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor required by MessageLite; returns the same shared PARSER.
public com.google.protobuf.Parser<ListRoutesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor required by MessageLite; returns the shared default instance.
public com.google.cloud.datastream.v1alpha1.ListRoutesRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
package org.optaplanner.examples.conferencescheduling.solver;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import org.junit.Test;
import org.optaplanner.core.api.solver.SolverFactory;
import org.optaplanner.examples.conferencescheduling.app.ConferenceSchedulingApp;
import org.optaplanner.examples.conferencescheduling.domain.ConferenceSolution;
import org.optaplanner.examples.conferencescheduling.domain.Room;
import org.optaplanner.examples.conferencescheduling.domain.Speaker;
import org.optaplanner.examples.conferencescheduling.domain.Talk;
import org.optaplanner.examples.conferencescheduling.domain.TalkType;
import org.optaplanner.examples.conferencescheduling.domain.Timeslot;
import org.optaplanner.test.impl.score.buildin.hardsoft.HardSoftScoreVerifier;
import static org.optaplanner.examples.conferencescheduling.domain.ConferenceParametrization.*;
public class ConferenceSchedulingScoreHardConstraintTest {
// Verifier built from the real solver config; each test asserts the hard-score
// contribution of a single named constraint against a hand-built solution.
private HardSoftScoreVerifier<ConferenceSolution> scoreVerifier = new HardSoftScoreVerifier<>(
SolverFactory.createFromXmlResource(ConferenceSchedulingApp.SOLVER_CONFIG));
// Verifies TALK_TYPE_OF_TIMESLOT: a talk assigned to a timeslot whose talk-type set
// does not contain the talk's type costs -10000 hard per violation.
@Test
public void talkTypeOfTimeSlot() {
Talk talk1 = new Talk(1L)
.withSpeakerList(Collections.emptyList())
.withRequiredRoomTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Talk talk2 = new Talk(2L)
.withSpeakerList(Collections.emptyList())
.withRequiredRoomTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Timeslot slot1 = new Timeslot(1L);
Timeslot slot2 = new Timeslot(2L);
TalkType talkType1 = new TalkType(0L, "type1");
TalkType talkType2 = new TalkType(1L, "type2");
ConferenceSolution solution = new ConferenceSolution(1L)
.withTalkTypeList(Arrays.asList(talkType1, talkType2))
.withTalkList(Arrays.asList(talk1, talk2))
.withTimeslotList(Arrays.asList(slot1, slot2))
.withRoomList(Collections.emptyList())
.withSpeakerList(Collections.emptyList());
// No talk is assigned yet, so no violation.
scoreVerifier.assertHardWeight(TALK_TYPE_OF_TIMESLOT, 0, solution);
// time slot with matching talk type
talk1.withTalkType(talkType1).withTimeslot(slot1);
slot1.setTalkTypeSet(Collections.singleton(talkType1));
scoreVerifier.assertHardWeight(TALK_TYPE_OF_TIMESLOT, 0, solution);
// time slot with non matching talk type
talk2.withTalkType(talkType2).withTimeslot(slot2);
slot2.setTalkTypeSet(Collections.singleton(talkType1));
scoreVerifier.assertHardWeight(TALK_TYPE_OF_TIMESLOT, -10000, solution);
}
// Verifies ROOM_UNAVAILABLE_TIMESLOT: a talk scheduled in a room during one of the
// room's unavailable timeslots costs -10000 hard; unavailability in OTHER timeslots
// does not penalize.
@Test
public void talkHasUnavailableRoom() {
TalkType talkType = new TalkType(0L, "type1");
Talk talk1 = new Talk(1L)
.withTalkType(talkType)
.withSpeakerList(Collections.emptyList())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Talk talk2 = new Talk(2L)
.withTalkType(talkType)
.withSpeakerList(Collections.emptyList())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Timeslot slot1 = new Timeslot(1L).withTalkTypeSet(Collections.singleton(talkType));
Timeslot slot2 = new Timeslot(2L).withTalkTypeSet(Collections.singleton(talkType));
Room room1 = new Room(1L).withTalkTypeSet(Collections.emptySet());
Room room2 = new Room(2L).withTalkTypeSet(Collections.emptySet());
ConferenceSolution solution = new ConferenceSolution(1L)
.withTalkTypeList(Collections.singletonList(talkType))
.withTalkList(Arrays.asList(talk1, talk2))
.withTimeslotList(Arrays.asList(slot1, slot2))
.withRoomList(Arrays.asList(room1, room2))
.withSpeakerList(Collections.emptyList());
// Nothing scheduled yet, so no violation.
scoreVerifier.assertHardWeight(ROOM_UNAVAILABLE_TIMESLOT, 0, solution);
// talk with available room
room1.setUnavailableTimeslotSet(Collections.emptySet());
talk1.withTimeslot(slot1).withRoom(room1);
scoreVerifier.assertHardWeight(ROOM_UNAVAILABLE_TIMESLOT, 0, solution);
// Room unavailable only in slot2, but the talk is in slot1: still no violation.
room1.setUnavailableTimeslotSet(new HashSet<>(Arrays.asList(slot2)));
scoreVerifier.assertHardWeight(ROOM_UNAVAILABLE_TIMESLOT, 0, solution);
// talk with room with unavailable time slot
room1.setUnavailableTimeslotSet(new HashSet<>(Arrays.asList(slot1)));
talk1.withTimeslot(slot1).withRoom(room1);
scoreVerifier.assertHardWeight(ROOM_UNAVAILABLE_TIMESLOT, -10000, solution);
// Adding more unavailable slots does not change the penalty for the one violation.
room1.setUnavailableTimeslotSet(new HashSet<>(Arrays.asList(slot1, slot2)));
talk1.withTimeslot(slot1).withRoom(room1);
scoreVerifier.assertHardWeight(ROOM_UNAVAILABLE_TIMESLOT, -10000, solution);
}
// Verifies ROOM_CONFLICT: two talks in the SAME room during OVERLAPPING timeslots
// cost -10 hard; same room without overlap, or different rooms, do not penalize.
@Test
public void roomConflict() {
TalkType talkType = new TalkType(0L, "type1");
Talk talk1 = new Talk(1L)
.withTalkType(talkType)
.withSpeakerList(Collections.emptyList())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Talk talk2 = new Talk(2L)
.withTalkType(talkType)
.withSpeakerList(Collections.emptyList())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
// slot1 (9:00-10:00) overlaps slot2 (9:30-10:30); slot3 (10:00-11:00) only
// touches slot1's end boundary, which does not count as an overlap.
LocalDateTime start1 = LocalDateTime.of(2018, 1, 1, 9, 0);
LocalDateTime end1 = LocalDateTime.of(2018, 1, 1, 10, 0);
LocalDateTime start2 = LocalDateTime.of(2018, 1, 1, 9, 30);
LocalDateTime end2 = LocalDateTime.of(2018, 1, 1, 10, 30);
LocalDateTime start3 = LocalDateTime.of(2018, 1, 1, 10, 0);
LocalDateTime end3 = LocalDateTime.of(2018, 1, 1, 11, 0);
Timeslot slot1 = new Timeslot(1L)
.withTalkTypeSet(Collections.singleton(talkType))
.withStartDateTime(start1)
.withEndDateTime(end1);
Timeslot slot2 = new Timeslot(2L)
.withTalkTypeSet(Collections.singleton(talkType))
.withStartDateTime(start2)
.withEndDateTime(end2);
Timeslot slot3 = new Timeslot(3L)
.withTalkTypeSet(Collections.singleton(talkType))
.withStartDateTime(start3)
.withEndDateTime(end3);
Room room1 = new Room(1L)
.withTalkTypeSet(Collections.singleton(talkType))
.withUnavailableTimeslotSet(Collections.emptySet());
Room room2 = new Room(2L)
.withTalkTypeSet(Collections.singleton(talkType))
.withUnavailableTimeslotSet(Collections.emptySet());
ConferenceSolution solution = new ConferenceSolution(1L)
.withTalkTypeList(Collections.singletonList(talkType))
.withTalkList(Arrays.asList(talk1, talk2))
.withTimeslotList(Arrays.asList(slot1, slot2, slot3))
.withRoomList(Arrays.asList(room1, room2))
.withSpeakerList(Collections.emptyList());
scoreVerifier.assertHardWeight(ROOM_CONFLICT, 0, solution);
// talks in same room without overlapping time slots
talk1.withRoom(room1).withTimeslot(slot1);
talk2.withRoom(room1).withTimeslot(slot3);
scoreVerifier.assertHardWeight(ROOM_CONFLICT, 0, solution);
// talks in same room with overlapping time slots
talk1.withRoom(room2).withTimeslot(slot1);
talk2.withRoom(room2).withTimeslot(slot2);
scoreVerifier.assertHardWeight(ROOM_CONFLICT, -10, solution);
// talks in different room with overlapping time slots
talk1.withRoom(room1).withTimeslot(slot1);
talk2.withRoom(room2).withTimeslot(slot2);
scoreVerifier.assertHardWeight(ROOM_CONFLICT, 0, solution);
}
// Verifies SPEAKER_UNAVAILABLE_TIMESLOT: a talk whose timeslot is in any of its
// speakers' unavailable sets costs -1 hard per unavailable speaker; unavailability
// in other timeslots does not penalize.
@Test
public void talkWithUnavailableSpeaker() {
TalkType talkType = new TalkType(0L, "type1");
Speaker speaker1 = new Speaker(1L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Speaker speaker2 = new Speaker(2L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Timeslot slot1 = new Timeslot(1L).withTalkTypeSet(Collections.singleton(talkType));
// BUGFIX: slot2 previously reused planning id 1L (same as slot1); give it a
// distinct id so the two timeslots are genuinely different entities.
Timeslot slot2 = new Timeslot(2L).withTalkTypeSet(Collections.singleton(talkType));
Talk talk1 = new Talk(1L)
.withSpeakerList(Collections.emptyList())
.withTalkType(talkType)
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
ConferenceSolution solution = new ConferenceSolution(1L)
.withTalkTypeList(Collections.singletonList(talkType))
.withTalkList(Arrays.asList(talk1))
.withTimeslotList(Arrays.asList(slot1))
.withRoomList(Collections.emptyList())
.withSpeakerList(Arrays.asList(speaker1, speaker2));
scoreVerifier.assertHardWeight(SPEAKER_UNAVAILABLE_TIMESLOT, 0, solution);
// talk without unavailable speaker
talk1.withSpeakerList(Arrays.asList(speaker1)).withTimeslot(slot1);
speaker1.setUnavailableTimeslotSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_UNAVAILABLE_TIMESLOT, 0, solution);
// Unavailable only in slot2 while the talk is in slot1: no violation.
speaker1.setUnavailableTimeslotSet(new HashSet<>(Arrays.asList(slot2)));
scoreVerifier.assertHardWeight(SPEAKER_UNAVAILABLE_TIMESLOT, 0, solution);
// talk with 2 speakers
talk1.withSpeakerList(Arrays.asList(speaker1, speaker2)).withTimeslot(slot1);
scoreVerifier.assertHardWeight(SPEAKER_UNAVAILABLE_TIMESLOT, 0, solution);
speaker2.setUnavailableTimeslotSet(new HashSet<>(Arrays.asList(slot2)));
scoreVerifier.assertHardWeight(SPEAKER_UNAVAILABLE_TIMESLOT, 0, solution);
// talk with 1 or more unavailable speakers
talk1.withSpeakerList(Arrays.asList(speaker1)).withTimeslot(slot1);
speaker1.setUnavailableTimeslotSet(new HashSet<>(Arrays.asList(slot1)));
scoreVerifier.assertHardWeight(SPEAKER_UNAVAILABLE_TIMESLOT, -1, solution);
speaker2.setUnavailableTimeslotSet(Collections.emptySet());
talk1.withSpeakerList(Arrays.asList(speaker1, speaker2)).withTimeslot(slot1);
scoreVerifier.assertHardWeight(SPEAKER_UNAVAILABLE_TIMESLOT, -1, solution);
speaker2.setUnavailableTimeslotSet(new HashSet<>(Arrays.asList(slot2)));
scoreVerifier.assertHardWeight(SPEAKER_UNAVAILABLE_TIMESLOT, -1, solution);
speaker2.setUnavailableTimeslotSet(new HashSet<>(Arrays.asList(slot1, slot2)));
scoreVerifier.assertHardWeight(SPEAKER_UNAVAILABLE_TIMESLOT, -1, solution);
}
// Verifies SPEAKER_CONFLICT: one speaker assigned to two talks in overlapping
// timeslots costs -1 hard.
@Test
public void speakerWithConflictingTimeslots() {
TalkType talkType = new TalkType(0L, "type1");
Speaker speaker1 = new Speaker(1L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Talk talk1 = new Talk(1L)
.withSpeakerList(Collections.emptyList())
.withTalkType(talkType)
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Talk talk2 = new Talk(2L)
.withSpeakerList(Collections.emptyList())
.withTalkType(talkType)
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
// slot1 (9:00-10:00) overlaps slot2 (9:30-10:30) but not slot3 (10:00-11:00).
LocalDateTime start1 = LocalDateTime.of(2018, 1, 1, 9, 0);
LocalDateTime end1 = LocalDateTime.of(2018, 1, 1, 10, 0);
LocalDateTime start2 = LocalDateTime.of(2018, 1, 1, 9, 30);
LocalDateTime end2 = LocalDateTime.of(2018, 1, 1, 10, 30);
LocalDateTime start3 = LocalDateTime.of(2018, 1, 1, 10, 0);
LocalDateTime end3 = LocalDateTime.of(2018, 1, 1, 11, 0);
Timeslot slot1 = new Timeslot(1L)
.withTalkTypeSet(Collections.singleton(talkType))
.withStartDateTime(start1)
.withEndDateTime(end1);
Timeslot slot2 = new Timeslot(2L)
.withTalkTypeSet(Collections.singleton(talkType))
.withStartDateTime(start2)
.withEndDateTime(end2);
Timeslot slot3 = new Timeslot(3L)
.withTalkTypeSet(Collections.singleton(talkType))
.withStartDateTime(start3)
.withEndDateTime(end3);
ConferenceSolution solution = new ConferenceSolution(1L)
.withTalkTypeList(Collections.singletonList(talkType))
.withTalkList(Arrays.asList(talk1,talk2))
.withTimeslotList(Arrays.asList(slot1,slot2,slot3))
.withRoomList(Collections.emptyList())
.withSpeakerList(Arrays.asList(speaker1));
scoreVerifier.assertHardWeight(SPEAKER_CONFLICT, 0, solution);
// speaker has no conflicting time slots
talk1.withSpeakerList(Arrays.asList(speaker1)).withTimeslot(slot1);
talk2.withSpeakerList(Arrays.asList(speaker1)).withTimeslot(slot3);
scoreVerifier.assertHardWeight(SPEAKER_CONFLICT, 0, solution);
// speaker has conflicting time slots (slot2 overlaps slot1)
talk2.withTimeslot(slot2);
scoreVerifier.assertHardWeight(SPEAKER_CONFLICT, -1, solution);
}
// Verifies SPEAKER_REQUIRED_TIMESLOT_TAG: each required tag of each speaker that is
// missing from the talk's timeslot costs -1 hard (penalties sum across speakers).
@Test
public void speakerRequiredTimeSlotTag() {
String tag1 = "tag1";
String tag2 = "tag2";
String tag3 = "tag3";
TalkType talkType = new TalkType(0L, "type1");
Speaker speaker1 = new Speaker(1L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Speaker speaker2 = new Speaker(2L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Talk talk1 = new Talk(1L)
.withTalkType(talkType)
.withSpeakerList(Collections.emptyList())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
LocalDateTime start1 = LocalDateTime.of(2018, 1, 1, 9, 0);
LocalDateTime end1 = LocalDateTime.of(2018, 1, 1, 10, 0);
Timeslot slot1 = new Timeslot(1L)
.withTalkTypeSet(Collections.singleton(talkType))
.withStartDateTime(start1)
.withEndDateTime(end1);
ConferenceSolution solution = new ConferenceSolution(1L)
.withTalkTypeList(Collections.singletonList(talkType))
.withTalkList(Arrays.asList(talk1))
.withTimeslotList(Arrays.asList(slot1))
.withRoomList(Collections.emptyList())
.withSpeakerList(Arrays.asList(speaker1, speaker2));
// talk with 1 speaker, speaker without required time slot tag
talk1.withSpeakerList(Arrays.asList(speaker1)).withTimeslot(slot1);
slot1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, 0, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, 0, solution);
// talk with 1 speaker, speaker with required time slot tag, time slot without matching tag
slot1.setTagSet(Collections.emptySet());
speaker1.setRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, -1, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag2, tag3)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, -1, solution);
// talk with 1 speaker, speaker with required time slot tag, time slot with matching tag
speaker1.setRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, 0, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, 0, solution);
// talk with 1 speaker, speaker with 2 required time slot tags
// (penalty equals the number of required tags missing from the slot)
speaker1.setRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
slot1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, -2, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, -1, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, 0, solution);
// talk with 2 speakers, speakers with required time slot tag, time slot without matching tag
talk1.withSpeakerList(Arrays.asList(speaker1, speaker2));
slot1.setTagSet(Collections.emptySet());
speaker1.setRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
speaker2.setRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, -2, solution);
// talk with 2 speakers, speakers with 2 required time slot tags, time slot without matching tag
slot1.setTagSet(Collections.emptySet());
speaker1.setRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
speaker2.setRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, -4, solution);
// talk with 2 speakers, speakers with different required time slot tags, time slot with partially matching tag
slot1.setTagSet(Collections.emptySet());
speaker1.setRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
speaker2.setRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag3)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, -4, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, -2, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag2)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_TIMESLOT_TAG, -3, solution);
}
// Verifies SPEAKER_PROHIBITED_TIMESLOT_TAG: each prohibited tag of each speaker that
// IS present on the talk's timeslot costs -1 hard (penalties sum across speakers).
@Test
public void speakerProhibitedTimeSlotTag() {
String tag1 = "tag1";
String tag2 = "tag2";
String tag3 = "tag3";
TalkType talkType = new TalkType(0L, "type1");
Speaker speaker1 = new Speaker(1L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Speaker speaker2 = new Speaker(2L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
Talk talk1 = new Talk(1L)
.withTalkType(talkType)
.withSpeakerList(Collections.emptyList())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withProhibitedTimeslotTagSet(Collections.emptySet())
.withUndesiredTimeslotTagSet(Collections.emptySet());
LocalDateTime start1 = LocalDateTime.of(2018, 1, 1, 9, 0);
LocalDateTime end1 = LocalDateTime.of(2018, 1, 1, 10, 0);
Timeslot slot1 = new Timeslot(1L)
.withTalkTypeSet(Collections.singleton(talkType))
.withStartDateTime(start1)
.withEndDateTime(end1);
ConferenceSolution solution = new ConferenceSolution(1L)
.withTalkTypeList(Collections.singletonList(talkType))
.withTalkList(Arrays.asList(talk1))
.withTimeslotList(Arrays.asList(slot1))
.withRoomList(Collections.emptyList())
.withSpeakerList(Arrays.asList(speaker1, speaker2));
// talk with 1 speaker, speaker without prohibited time slot tag
talk1.withSpeakerList(Arrays.asList(speaker1)).withTimeslot(slot1);
slot1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, 0, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, 0, solution);
// talk with 1 speaker, speaker with prohibited time slot tag, time slot without matching tag
speaker1.setProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
slot1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, 0, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag2)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, 0, solution);
// talk with 1 speaker, speaker with prohibited time slot tag, time slot with matching tag
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -1, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -1, solution);
// talk with 1 speaker, speaker with 2 prohibited time slot tags
// (penalty equals the number of prohibited tags present on the slot)
speaker1.setProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
slot1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, 0, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -1, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -2, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2, tag3)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -2, solution);
// talk with 2 speakers, speakers with prohibited time slot tag, time slot without matching tag
talk1.withSpeakerList(Arrays.asList(speaker1, speaker2));
slot1.setTagSet(Collections.emptySet());
speaker1.setProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
speaker2.setProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, 0, solution);
// talk with 2 speakers, speakers with prohibited time slot tags, time slot with matching tags
speaker1.setProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
speaker2.setProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -2, solution);
speaker1.setProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -2, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -3, solution);
speaker2.setProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -4, solution);
speaker2.setProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag3)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -3, solution);
slot1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2, tag3)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_TIMESLOT_TAG, -4, solution);
}
@Test
public void talkRequiredTimeslotTag() {
    // Verifies TALK_REQUIRED_TIMESLOT_TAG: each required tag missing from the
    // talk's assigned timeslot costs one hard point.
    String firstTag = "tag1";
    String secondTag = "tag2";
    TalkType defaultType = new TalkType(0L, "type1");
    // The talk starts with every timeslot-tag constraint empty.
    Talk talk = new Talk(1L)
            .withTalkType(defaultType)
            .withSpeakerList(Collections.emptyList())
            .withRequiredTimeslotTagSet(Collections.emptySet())
            .withProhibitedTimeslotTagSet(Collections.emptySet())
            .withPreferredTimeslotTagSet(Collections.emptySet())
            .withUndesiredTimeslotTagSet(Collections.emptySet());
    LocalDateTime slotStart = LocalDateTime.of(2018, 1, 1, 9, 0);
    LocalDateTime slotEnd = LocalDateTime.of(2018, 1, 1, 10, 0);
    Timeslot timeslot = new Timeslot(1L)
            .withTalkTypeSet(Collections.singleton(defaultType))
            .withStartDateTime(slotStart)
            .withEndDateTime(slotEnd);
    ConferenceSolution solution = new ConferenceSolution(1L)
            .withTalkTypeList(Collections.singletonList(defaultType))
            .withTalkList(Arrays.asList(talk))
            .withTimeslotList(Arrays.asList(timeslot))
            .withRoomList(Collections.emptyList())
            .withSpeakerList(Collections.emptyList());
    // No required tags on the talk: the constraint can never match.
    timeslot.setTagSet(Collections.emptySet());
    talk.withTimeslot(timeslot);
    scoreVerifier.assertHardWeight(TALK_REQUIRED_TIMESLOT_TAG, 0, solution);
    timeslot.setTagSet(new HashSet<>(Arrays.asList(firstTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_TIMESLOT_TAG, 0, solution);
    // One required tag that the assigned timeslot does not carry: -1 hard.
    talk.withRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(firstTag)));
    timeslot.setTagSet(Collections.emptySet());
    scoreVerifier.assertHardWeight(TALK_REQUIRED_TIMESLOT_TAG, -1, solution);
    timeslot.setTagSet(new HashSet<>(Arrays.asList(secondTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_TIMESLOT_TAG, -1, solution);
    // Required tag present on the timeslot: no penalty.
    talk.withRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(firstTag)));
    timeslot.setTagSet(new HashSet<>(Arrays.asList(firstTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_TIMESLOT_TAG, 0, solution);
    timeslot.setTagSet(new HashSet<>(Arrays.asList(firstTag, secondTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_TIMESLOT_TAG, 0, solution);
    // Two required tags: each missing tag costs one hard point.
    timeslot.setTagSet(Collections.emptySet());
    talk.withRequiredTimeslotTagSet(new HashSet<>(Arrays.asList(firstTag, secondTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_TIMESLOT_TAG, -2, solution);
    timeslot.setTagSet(new HashSet<>(Arrays.asList(firstTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_TIMESLOT_TAG, -1, solution);
    timeslot.setTagSet(new HashSet<>(Arrays.asList(firstTag, secondTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_TIMESLOT_TAG, 0, solution);
}
@Test
public void talkProhibitedTimeslotTag() {
    // Verifies TALK_PROHIBITED_TIMESLOT_TAG: each prohibited tag that the
    // talk's assigned timeslot carries costs one hard point.
    String tag1 = "tag1";
    String tag2 = "tag2";
    TalkType talkType = new TalkType(0L, "type1");
    Talk talk1 = new Talk(1L)
            .withTalkType(talkType)
            .withSpeakerList(Collections.emptyList())
            .withPreferredTimeslotTagSet(Collections.emptySet())
            .withRequiredTimeslotTagSet(Collections.emptySet())
            .withProhibitedTimeslotTagSet(Collections.emptySet())
            .withUndesiredTimeslotTagSet(Collections.emptySet());
    LocalDateTime start1 = LocalDateTime.of(2018, 1, 1, 9, 0);
    LocalDateTime end1 = LocalDateTime.of(2018, 1, 1, 10, 0);
    Timeslot slot1 = new Timeslot(1L)
            .withTalkTypeSet(Collections.singleton(talkType))
            .withStartDateTime(start1)
            .withEndDateTime(end1);
    ConferenceSolution solution = new ConferenceSolution(1L)
            .withTalkTypeList(Collections.singletonList(talkType))
            .withTalkList(Arrays.asList(talk1))
            .withTimeslotList(Arrays.asList(slot1))
            .withRoomList(Collections.emptyList())
            .withSpeakerList(Collections.emptyList());
    // talk without prohibited time slot tags
    slot1.setTagSet(Collections.emptySet());
    talk1.withTimeslot(slot1);
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_TIMESLOT_TAG, 0, solution);
    slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
    // BUG FIX: this assertion previously checked TALK_REQUIRED_TIMESLOT_TAG
    // (copy-paste from the required-tag test); this test exercises the
    // prohibited-tag constraint, so assert that one instead.
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_TIMESLOT_TAG, 0, solution);
    // talk with prohibited time slot tag, time slot without matching tag
    slot1.setTagSet(Collections.emptySet());
    talk1.withTimeslot(slot1).withProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_TIMESLOT_TAG, 0, solution);
    slot1.setTagSet(new HashSet<>(Arrays.asList(tag2)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_TIMESLOT_TAG, 0, solution);
    // talk with prohibited time slot tag, time slot with matching tag
    talk1.withTimeslot(slot1).withProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1)));
    slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_TIMESLOT_TAG, -1, solution);
    slot1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_TIMESLOT_TAG, -1, solution);
    // talk with 2 prohibited time slot tags
    talk1.withTimeslot(slot1).withProhibitedTimeslotTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
    slot1.setTagSet(Collections.emptySet());
    // (was "-0" — normalized to plain 0, same value)
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_TIMESLOT_TAG, 0, solution);
    slot1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_TIMESLOT_TAG, -1, solution);
    slot1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_TIMESLOT_TAG, -2, solution);
}
@Test
public void speakerRequiredRoomTag() {
// Verifies SPEAKER_REQUIRED_ROOM_TAG: for each speaker of the talk, each of
// that speaker's required room tags missing from the talk's room costs one
// hard point. Penalties accumulate across speakers.
String tag1 = "tag1";
String tag2 = "tag2";
String tag3 = "tag3";
TalkType talkType = new TalkType(0L, "type1");
Room room1 = new Room(1L).withTalkTypeSet(Collections.singleton(talkType));
Talk talk1 = new Talk(1L)
.withSpeakerList(Collections.emptyList())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet());
Speaker speaker1 = new Speaker(1L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet());
Speaker speaker2 = new Speaker(2L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet());
ConferenceSolution solution = new ConferenceSolution(1L)
.withTalkTypeList(Collections.singletonList(talkType))
.withTalkList(Arrays.asList(talk1))
.withTimeslotList(Collections.emptyList())
.withRoomList(Arrays.asList(room1))
.withSpeakerList(Arrays.asList(speaker1, speaker2));
// talk with 1 speaker without required room tags
room1.setTagSet(Collections.emptySet());
talk1.withSpeakerList(Arrays.asList(speaker1)).withRoom(room1);
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, 0, solution);
// talk with 1 speaker, speaker with required room tag, room without matching tag
speaker1.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(tag1)));
room1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -1, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag3)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -1, solution);
// talk with 1 speaker, speaker with required room tag, room with matching tag
speaker1.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(tag1)));
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -0, solution);
// talk with 1 speaker, speaker with 2 required room tags
speaker1.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
room1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -2, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -1, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, 0, solution);
// talk with 2 speakers
// From here on, both speakers contribute independently to the penalty.
talk1.withSpeakerList(Arrays.asList(speaker1, speaker2)).withRoom(room1);
speaker1.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(tag1)));
speaker2.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(tag1)));
room1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -2, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, 0, solution);
speaker2.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(tag2)));
room1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -2, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -1, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, 0, solution);
speaker2.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
room1.setTagSet(Collections.emptySet());
// speaker1 misses tag1, speaker2 misses tag1 and tag2 -> 3 violations.
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -3, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, -1, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_REQUIRED_ROOM_TAG, 0, solution);
}
@Test
public void speakerProhibitedRoomTag() {
// Verifies SPEAKER_PROHIBITED_ROOM_TAG: for each speaker of the talk, each of
// that speaker's prohibited room tags that the talk's room carries costs one
// hard point. Penalties accumulate across speakers.
String tag1 = "tag1";
String tag2 = "tag2";
String tag3 = "tag3";
TalkType talkType = new TalkType(0L, "type1");
Room room1 = new Room(1L).withTalkTypeSet(Collections.singleton(talkType));
Talk talk1 = new Talk(1L)
.withSpeakerList(Collections.emptyList())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet());
Speaker speaker1 = new Speaker(1L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet());
Speaker speaker2 = new Speaker(2L)
.withUnavailableTimeslotSet(Collections.emptySet())
.withRequiredTimeslotTagSet(Collections.emptySet())
.withRequiredRoomTagSet(Collections.emptySet())
.withPreferredTimeslotTagSet(Collections.emptySet())
.withPreferredRoomTagSet(Collections.emptySet())
.withProhibitedRoomTagSet(Collections.emptySet())
.withUndesiredRoomTagSet(Collections.emptySet());
ConferenceSolution solution = new ConferenceSolution(1L)
.withTalkTypeList(Collections.singletonList(talkType))
.withTalkList(Arrays.asList(talk1))
.withTimeslotList(Collections.emptyList())
.withRoomList(Arrays.asList(room1))
.withSpeakerList(Arrays.asList(speaker1, speaker2));
// Baseline: talk has no room assigned yet, so no penalty.
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, 0, solution);
// talk with 1 speaker without prohibited room tags
room1.setTagSet(Collections.emptySet());
talk1.withSpeakerList(Arrays.asList(speaker1)).withRoom(room1);
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, 0, solution);
// talk with 1 speaker, speaker with prohibited room tag, room without matching tag
speaker1.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(tag1)));
room1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, 0, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag3)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, 0, solution);
// talk with 1 speaker, speaker with prohibited room tag, room with matching tag
speaker1.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(tag1)));
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, -1, solution);
// talk with 1 speaker, speaker with 2 prohibited room tags
speaker1.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
room1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, 0, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, -1, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, -2, solution);
// talk with 2 speakers
// From here on, both speakers contribute independently to the penalty.
talk1.withSpeakerList(Arrays.asList(speaker1, speaker2)).withRoom(room1);
speaker1.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(tag1)));
speaker2.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(tag1)));
room1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, 0, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, -2, solution);
speaker2.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(tag2)));
room1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, 0, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, -1, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, -2, solution);
speaker2.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
room1.setTagSet(Collections.emptySet());
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, 0, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1)));
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, -2, solution);
room1.setTagSet(new HashSet<>(Arrays.asList(tag1, tag2)));
// speaker1 matches tag1; speaker2 matches tag1 and tag2 -> 3 violations.
scoreVerifier.assertHardWeight(SPEAKER_PROHIBITED_ROOM_TAG, -3, solution);
}
@Test
public void talkRequiredRoomTag() {
    // Verifies TALK_REQUIRED_ROOM_TAG: each required room tag missing from the
    // talk's assigned room costs one hard point.
    String firstTag = "tag1";
    String secondTag = "tag2";
    TalkType defaultType = new TalkType(0L, "type1");
    Room room = new Room(1L).withTalkTypeSet(Collections.emptySet());
    // The talk starts with every tag constraint empty.
    Talk talk = new Talk(1L)
            .withTalkType(defaultType)
            .withSpeakerList(Collections.emptyList())
            .withRequiredTimeslotTagSet(Collections.emptySet())
            .withPreferredTimeslotTagSet(Collections.emptySet())
            .withRequiredRoomTagSet(Collections.emptySet())
            .withProhibitedRoomTagSet(Collections.emptySet())
            .withPreferredRoomTagSet(Collections.emptySet())
            .withUndesiredRoomTagSet(Collections.emptySet());
    ConferenceSolution solution = new ConferenceSolution(1L)
            .withTalkTypeList(Collections.singletonList(defaultType))
            .withTalkList(Arrays.asList(talk))
            .withTimeslotList(Collections.emptyList())
            .withRoomList(Collections.emptyList())
            .withSpeakerList(Collections.emptyList());
    // No required room tags on the talk: never penalized.
    room.setTagSet(Collections.emptySet());
    talk.withRoom(room);
    scoreVerifier.assertHardWeight(TALK_REQUIRED_ROOM_TAG, 0, solution);
    room.setTagSet(new HashSet<>(Arrays.asList(firstTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_ROOM_TAG, 0, solution);
    // One required tag missing from the room: -1 hard.
    talk.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(firstTag)));
    room.setTagSet(Collections.emptySet());
    scoreVerifier.assertHardWeight(TALK_REQUIRED_ROOM_TAG, -1, solution);
    room.setTagSet(new HashSet<>(Arrays.asList(secondTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_ROOM_TAG, -1, solution);
    // Required tag satisfied by the room: no penalty.
    talk.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(firstTag)));
    room.setTagSet(new HashSet<>(Arrays.asList(firstTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_ROOM_TAG, 0, solution);
    // Two required tags: one hard point per missing tag.
    room.setTagSet(Collections.emptySet());
    talk.withRequiredRoomTagSet(new HashSet<>(Arrays.asList(firstTag, secondTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_ROOM_TAG, -2, solution);
    room.setTagSet(new HashSet<>(Arrays.asList(firstTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_ROOM_TAG, -1, solution);
    room.setTagSet(new HashSet<>(Arrays.asList(firstTag, secondTag)));
    scoreVerifier.assertHardWeight(TALK_REQUIRED_ROOM_TAG, 0, solution);
}
@Test
public void talkProhibitedRoomTag() {
    // Verifies TALK_PROHIBITED_ROOM_TAG: each prohibited room tag carried by
    // the talk's assigned room costs one hard point.
    String firstTag = "tag1";
    String secondTag = "tag2";
    TalkType defaultType = new TalkType(0L, "type1");
    Room room = new Room(1L).withTalkTypeSet(Collections.emptySet());
    // The talk starts with every tag constraint empty.
    Talk talk = new Talk(1L)
            .withTalkType(defaultType)
            .withSpeakerList(Collections.emptyList())
            .withRequiredTimeslotTagSet(Collections.emptySet())
            .withPreferredTimeslotTagSet(Collections.emptySet())
            .withRequiredRoomTagSet(Collections.emptySet())
            .withProhibitedRoomTagSet(Collections.emptySet())
            .withPreferredRoomTagSet(Collections.emptySet())
            .withUndesiredRoomTagSet(Collections.emptySet());
    ConferenceSolution solution = new ConferenceSolution(1L)
            .withTalkTypeList(Collections.singletonList(defaultType))
            .withTalkList(Arrays.asList(talk))
            .withTimeslotList(Collections.emptyList())
            .withRoomList(Collections.emptyList())
            .withSpeakerList(Collections.emptyList());
    // No prohibited room tags on the talk: never penalized.
    room.setTagSet(Collections.emptySet());
    talk.withRoom(room);
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_ROOM_TAG, 0, solution);
    room.setTagSet(new HashSet<>(Arrays.asList(firstTag)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_ROOM_TAG, 0, solution);
    // Prohibited tag not carried by the room: no penalty.
    talk.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(firstTag)));
    room.setTagSet(Collections.emptySet());
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_ROOM_TAG, 0, solution);
    room.setTagSet(new HashSet<>(Arrays.asList(secondTag)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_ROOM_TAG, 0, solution);
    // Prohibited tag carried by the room: -1 hard.
    talk.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(firstTag)));
    room.setTagSet(new HashSet<>(Arrays.asList(firstTag)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_ROOM_TAG, -1, solution);
    // Two prohibited tags: one hard point per matching tag.
    room.setTagSet(Collections.emptySet());
    talk.withProhibitedRoomTagSet(new HashSet<>(Arrays.asList(firstTag, secondTag)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_ROOM_TAG, 0, solution);
    room.setTagSet(new HashSet<>(Arrays.asList(firstTag)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_ROOM_TAG, -1, solution);
    room.setTagSet(new HashSet<>(Arrays.asList(firstTag, secondTag)));
    scoreVerifier.assertHardWeight(TALK_PROHIBITED_ROOM_TAG, -2, solution);
}
}
| |
/*
* Copyright 2013, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.dexlib2.dexbacked.raw;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import org.jf.dexlib2.VerificationError;
import org.jf.dexlib2.dexbacked.DexReader;
import org.jf.dexlib2.dexbacked.instruction.DexBackedInstruction;
import org.jf.dexlib2.dexbacked.raw.util.DexAnnotator;
import org.jf.dexlib2.iface.instruction.*;
import org.jf.dexlib2.iface.instruction.formats.*;
import org.jf.dexlib2.util.AnnotatedBytes;
import org.jf.dexlib2.util.ReferenceUtil;
import org.jf.util.ExceptionWithContext;
import org.jf.util.NumberUtils;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.List;
public class CodeItem {
// Byte offsets of the fixed-width header fields within a code_item structure,
// as used when reading/annotating the raw dex bytes.
public static final int REGISTERS_OFFSET = 0;
public static final int INS_OFFSET = 2;
public static final int OUTS_OFFSET = 4;
public static final int TRIES_SIZE_OFFSET = 6;
public static final int DEBUG_INFO_OFFSET = 8;
public static final int INSTRUCTION_COUNT_OFFSET = 12;
public static final int INSTRUCTION_START_OFFSET = 16;
// Layout constants for one try_item entry in a code_item's tries array.
public static class TryItem {
// Total size of a try_item, in bytes.
public static final int ITEM_SIZE = 8;
// Byte offsets of the try_item fields (start_addr, insn_count, handler_off).
public static final int START_ADDRESS_OFFSET = 0;
public static final int CODE_UNIT_COUNT_OFFSET = 4;
public static final int HANDLER_OFFSET = 6;
}
@Nonnull
public static SectionAnnotator makeAnnotator(@Nonnull DexAnnotator annotator, @Nonnull MapItem mapItem) {
return new SectionAnnotator(annotator, mapItem) {
// Annotator for the debug_info section, resolved once per section pass.
// NOTE(review): presumably consumed by addDebugInfoIdentity (not visible here) — confirm.
private SectionAnnotator debugInfoAnnotator = null;
@Override public void annotateSection(@Nonnull AnnotatedBytes out) {
// Look up the debug_info annotator before iterating items, then delegate.
debugInfoAnnotator = annotator.getAnnotator(ItemType.DEBUG_INFO_ITEM);
super.annotateSection(out);
}
@Nonnull @Override public String getItemName() {
// Item name as it appears in the dex format specification.
return "code_item";
}
@Override public int getItemAlignment() {
// code_item structures are 4-byte aligned.
return 4;
}
@Override
public void annotateItem(@Nonnull AnnotatedBytes out, int itemIndex, @Nullable String itemIdentity) {
    // Annotates one code_item: the fixed header, the instruction list, and
    // (when tries_size > 0) the try_items plus the encoded_catch_handler_list.
    try {
        DexReader reader = dexFile.readerAt(out.getCursor());
        int registers = reader.readUshort();
        out.annotate(2, "registers_size = %d", registers);
        int inSize = reader.readUshort();
        out.annotate(2, "ins_size = %d", inSize);
        int outSize = reader.readUshort();
        out.annotate(2, "outs_size = %d", outSize);
        int triesCount = reader.readUshort();
        out.annotate(2, "tries_size = %d", triesCount);
        int debugInfoOffset = reader.readSmallUint();
        out.annotate(4, "debug_info_off = 0x%x", debugInfoOffset);
        if (debugInfoOffset != 0) {
            addDebugInfoIdentity(debugInfoOffset, itemIdentity);
        }
        int instructionSize = reader.readSmallUint();
        out.annotate(4, "insns_size = 0x%x", instructionSize);
        out.annotate(0, "instructions:");
        out.indent();
        // insns_size is counted in 16-bit code units, hence the *2 for bytes.
        out.setLimit(out.getCursor(), out.getCursor() + instructionSize * 2);
        int end = reader.getOffset() + instructionSize*2;
        try {
            while (reader.getOffset() < end) {
                Instruction instruction = DexBackedInstruction.readFrom(reader);
                // if we read past the end of the instruction list
                if (reader.getOffset() > end) {
                    out.annotateTo(end, "truncated instruction");
                    reader.setOffset(end);
                } else {
                    switch (instruction.getOpcode().format) {
                        case Format10x:
                            annotateInstruction10x(out, instruction);
                            break;
                        case Format35c:
                            annotateInstruction35c(out, (Instruction35c)instruction);
                            break;
                        case Format3rc:
                            annotateInstruction3rc(out, (Instruction3rc)instruction);
                            break;
                        case ArrayPayload:
                            annotateArrayPayload(out, (ArrayPayload)instruction);
                            break;
                        case PackedSwitchPayload:
                            annotatePackedSwitchPayload(out, (PackedSwitchPayload)instruction);
                            break;
                        case SparseSwitchPayload:
                            annotateSparseSwitchPayload(out, (SparseSwitchPayload)instruction);
                            break;
                        default:
                            annotateDefaultInstruction(out, instruction);
                            break;
                    }
                }
                assert reader.getOffset() == out.getCursor();
            }
        } catch (ExceptionWithContext ex) {
            // Keep annotating after a malformed instruction: record the error
            // and skip to the end of the instruction list.
            ex.printStackTrace(System.err);
            out.annotate(0, "annotation error: %s", ex.getMessage());
            out.moveTo(end);
            reader.setOffset(end);
        } finally {
            out.clearLimit();
            out.deindent();
        }
        if (triesCount > 0) {
            // try_items are 4-byte aligned; consume 2 bytes of padding if needed.
            if ((reader.getOffset() % 4) != 0) {
                reader.readUshort();
                out.annotate(2, "padding");
            }
            out.annotate(0, "try_items:");
            out.indent();
            try {
                for (int i=0; i<triesCount; i++) {
                    out.annotate(0, "try_item[%d]:", i);
                    out.indent();
                    try {
                        int startAddr = reader.readSmallUint();
                        out.annotate(4, "start_addr = 0x%x", startAddr);
                        int instructionCount = reader.readUshort();
                        out.annotate(2, "insn_count = 0x%x", instructionCount);
                        int handlerOffset = reader.readUshort();
                        out.annotate(2, "handler_off = 0x%x", handlerOffset);
                    } finally {
                        out.deindent();
                    }
                }
            } finally {
                out.deindent();
            }
            int handlerListCount = reader.readSmallUleb128();
            out.annotate(0, "encoded_catch_handler_list:");
            out.annotateTo(reader.getOffset(), "size = %d", handlerListCount);
            out.indent();
            try {
                for (int i=0; i<handlerListCount; i++) {
                    out.annotate(0, "encoded_catch_handler[%d]", i);
                    out.indent();
                    try {
                        int handlerCount = reader.readSleb128();
                        out.annotateTo(reader.getOffset(), "size = %d", handlerCount);
                        // A non-positive size means the handler list ends with a catch-all.
                        boolean hasCatchAll = handlerCount <= 0;
                        handlerCount = Math.abs(handlerCount);
                        if (handlerCount != 0) {
                            out.annotate(0, "handlers:");
                            out.indent();
                            try {
                                for (int j=0; j<handlerCount; j++) {
                                    // BUG FIX: previously annotated with the outer list index
                                    // i instead of the handler index j, mislabeling every
                                    // encoded_type_addr_pair after the first handler list.
                                    out.annotate(0, "encoded_type_addr_pair[%d]", j);
                                    out.indent();
                                    try {
                                        int typeIndex = reader.readSmallUleb128();
                                        out.annotateTo(reader.getOffset(), TypeIdItem.getReferenceAnnotation(dexFile, typeIndex));
                                        int handlerAddress = reader.readSmallUleb128();
                                        out.annotateTo(reader.getOffset(), "addr = 0x%x", handlerAddress);
                                    } finally {
                                        out.deindent();
                                    }
                                }
                            } finally {
                                out.deindent();
                            }
                        }
                        if (hasCatchAll) {
                            int catchAllAddress = reader.readSmallUleb128();
                            out.annotateTo(reader.getOffset(), "catch_all_addr = 0x%x", catchAllAddress);
                        }
                    } finally {
                        out.deindent();
                    }
                }
            } finally {
                out.deindent();
            }
        }
    } catch (ExceptionWithContext ex) {
        out.annotate(0, "annotation error: %s", ex.getMessage());
    }
}
private String formatRegister(int registerNum) {
    // Dalvik registers are conventionally rendered as v0, v1, ...
    return "v" + registerNum;
}
// Format 10x has no operands: the annotation is just the opcode name,
// covering the instruction's 2 bytes (one code unit).
private void annotateInstruction10x(@Nonnull AnnotatedBytes out, @Nonnull Instruction instruction) {
out.annotate(2, instruction.getOpcode().name);
}
/**
 * Annotates a format-35c instruction, e.g. {@code invoke-virtual {v0, v1}, Lfoo;->bar()V}.
 * Format 35c encodes up to five explicit argument registers (C through G);
 * each register getter is only called when register_count covers that slot.
 */
private void annotateInstruction35c(@Nonnull AnnotatedBytes out, @Nonnull Instruction35c instruction) {
    List<String> args = Lists.newArrayList();
    int count = instruction.getRegisterCount();
    // Guarded cumulative adds replace the original duplicated if/else cascade;
    // counts of 0 or (invalid) >5 add no registers, exactly as before.
    if (count >= 1 && count <= 5) {
        args.add(formatRegister(instruction.getRegisterC()));
    }
    if (count >= 2 && count <= 5) {
        args.add(formatRegister(instruction.getRegisterD()));
    }
    if (count >= 3 && count <= 5) {
        args.add(formatRegister(instruction.getRegisterE()));
    }
    if (count >= 4 && count <= 5) {
        args.add(formatRegister(instruction.getRegisterF()));
    }
    if (count == 5) {
        args.add(formatRegister(instruction.getRegisterG()));
    }
    String reference = ReferenceUtil.getReferenceString(instruction.getReference());
    out.annotate(6, String.format("%s {%s}, %s",
            instruction.getOpcode().name, Joiner.on(", ").join(args), reference));
}
/**
 * Annotates a format-3rc instruction, which addresses a contiguous register
 * range, e.g. {@code invoke-virtual/range {v0 .. v5}, Lfoo;->bar(...)V}.
 */
private void annotateInstruction3rc(@Nonnull AnnotatedBytes out, @Nonnull Instruction3rc instruction) {
    int firstRegister = instruction.getStartRegister();
    // The range is inclusive, so the last register is start + count - 1.
    int lastRegister = firstRegister + instruction.getRegisterCount() - 1;
    out.annotate(6, String.format("%s {%s .. %s}, %s",
            instruction.getOpcode().name,
            formatRegister(firstRegister),
            formatRegister(lastRegister),
            ReferenceUtil.getReferenceString(instruction.getReference())));
}
// Fallback annotator for every instruction format without a dedicated handler.
// Builds the argument list from the instruction's marker interfaces: first the
// register operands, then exactly one of the reference/offset/literal/odex
// operand kinds. The instanceof chains are ordered: Three extends Two extends
// One for registers, and the operand chain is mutually exclusive per format.
private void annotateDefaultInstruction(@Nonnull AnnotatedBytes out, @Nonnull Instruction instruction) {
List<String> args = Lists.newArrayList();
if (instruction instanceof OneRegisterInstruction) {
args.add(formatRegister(((OneRegisterInstruction)instruction).getRegisterA()));
if (instruction instanceof TwoRegisterInstruction) {
args.add(formatRegister(((TwoRegisterInstruction)instruction).getRegisterB()));
if (instruction instanceof ThreeRegisterInstruction) {
args.add(formatRegister(((ThreeRegisterInstruction)instruction).getRegisterC()));
}
}
} else if (instruction instanceof VerificationErrorInstruction) {
String verificationError = VerificationError.getVerificationErrorName(
((VerificationErrorInstruction) instruction).getVerificationError());
if (verificationError != null) {
args.add(verificationError);
} else {
args.add("invalid verification error type");
}
}
if (instruction instanceof ReferenceInstruction) {
args.add(ReferenceUtil.getReferenceString(((ReferenceInstruction)instruction).getReference()));
} else if (instruction instanceof OffsetInstruction) {
// Branch targets are shown as signed relative offsets, e.g. +0x10 / -0x4.
int offset = ((OffsetInstruction)instruction).getCodeOffset();
String sign = offset>=0?"+":"-";
args.add(String.format("%s0x%x", sign, Math.abs(offset)));
} else if (instruction instanceof NarrowLiteralInstruction) {
// Literals that plausibly encode a float/double get a decoded comment.
int value = ((NarrowLiteralInstruction)instruction).getNarrowLiteral();
if (NumberUtils.isLikelyFloat(value)) {
args.add(String.format("%d # %f", value, Float.intBitsToFloat(value)));
} else {
args.add(String.format("%d", value));
}
} else if (instruction instanceof WideLiteralInstruction) {
long value = ((WideLiteralInstruction)instruction).getWideLiteral();
if (NumberUtils.isLikelyDouble(value)) {
args.add(String.format("%d # %f", value, Double.longBitsToDouble(value)));
} else {
args.add(String.format("%d", value));
}
} else if (instruction instanceof FieldOffsetInstruction) {
int fieldOffset = ((FieldOffsetInstruction)instruction).getFieldOffset();
args.add(String.format("field@0x%x", fieldOffset));
} else if (instruction instanceof VtableIndexInstruction) {
int vtableIndex = ((VtableIndexInstruction)instruction).getVtableIndex();
args.add(String.format("vtable@%d", vtableIndex));
} else if (instruction instanceof InlineIndexInstruction) {
int inlineIndex = ((InlineIndexInstruction)instruction).getInlineIndex();
args.add(String.format("inline@%d", inlineIndex));
}
// Instruction length is reported in code units (2 bytes each).
out.annotate(instruction.getCodeUnits()*2, "%s %s",
instruction.getOpcode().name, Joiner.on(", ").join(args));
}
/**
 * Annotates a fill-array-data payload: a header (opcode, element width, element count)
 * followed by one annotation per element. 8-byte elements that look like encoded doubles
 * (and 4-byte ones that look like floats) also get the decoded value as a hint.
 */
private void annotateArrayPayload(@Nonnull AnnotatedBytes out, @Nonnull ArrayPayload instruction) {
    final List<Number> values = instruction.getArrayElements();
    final int width = instruction.getElementWidth();
    out.annotate(2, instruction.getOpcode().name);
    out.indent();
    out.annotate(2, "element_width = %d", width);
    out.annotate(4, "size = %d", values.size());
    out.annotate(0, "elements:");
    out.indent();
    for (int idx = 0; idx < values.size(); idx++) {
        Number element = values.get(idx);
        if (width == 8) {
            long raw = element.longValue();
            if (NumberUtils.isLikelyDouble(raw)) {
                out.annotate(width, "element[%d] = %d # %f", idx, raw, Double.longBitsToDouble(raw));
            } else {
                out.annotate(width, "element[%d] = %d", idx, raw);
            }
        } else {
            int raw = element.intValue();
            if (NumberUtils.isLikelyFloat(raw)) {
                out.annotate(width, "element[%d] = %d # %f", idx, raw, Float.intBitsToFloat(raw));
            } else {
                out.annotate(width, "element[%d] = %d", idx, raw);
            }
        }
    }
    // Payloads are aligned to even offsets; flag the trailing pad byte if present.
    if (out.getCursor() % 2 != 0) {
        out.annotate(1, "padding");
    }
    out.deindent();
    out.deindent();
}
/**
 * Annotates a packed-switch payload: opcode, case count, the first key, and one branch
 * target per case. An empty payload has no meaningful first_key, so only the field
 * position is labeled.
 */
private void annotatePackedSwitchPayload(@Nonnull AnnotatedBytes out,
                                         @Nonnull PackedSwitchPayload instruction) {
    List<? extends SwitchElement> switchElements = instruction.getSwitchElements();
    int count = switchElements.size();
    out.annotate(2, instruction.getOpcode().name);
    out.indent();
    out.annotate(2, "size = %d", count);
    if (count == 0) {
        out.annotate(4, "first_key");
    } else {
        out.annotate(4, "first_key = %d", switchElements.get(0).getKey());
        out.annotate(0, "targets:");
        out.indent();
        for (int idx = 0; idx < count; idx++) {
            out.annotate(4, "target[%d] = %d", idx, switchElements.get(idx).getOffset());
        }
        out.deindent();
    }
    out.deindent();
}
/**
 * Annotates a sparse-switch payload: opcode and case count, then the parallel
 * key and target arrays (one annotation per entry in each).
 */
private void annotateSparseSwitchPayload(@Nonnull AnnotatedBytes out,
                                         @Nonnull SparseSwitchPayload instruction) {
    List<? extends SwitchElement> switchElements = instruction.getSwitchElements();
    int count = switchElements.size();
    out.annotate(2, instruction.getOpcode().name);
    out.indent();
    out.annotate(2, "size = %d", count);
    if (!switchElements.isEmpty()) {
        out.annotate(0, "keys:");
        out.indent();
        for (int idx = 0; idx < count; idx++) {
            out.annotate(4, "key[%d] = %d", idx, switchElements.get(idx).getKey());
        }
        out.deindent();
        out.annotate(0, "targets:");
        out.indent();
        for (int idx = 0; idx < count; idx++) {
            out.annotate(4, "target[%d] = %d", idx, switchElements.get(idx).getOffset());
        }
        out.deindent();
    }
    out.deindent();
}
/**
 * Labels the debug-info item at the given offset with the owning method's string,
 * when a debug-info annotator was supplied; otherwise this is a no-op.
 */
private void addDebugInfoIdentity(int debugInfoOffset, String methodString) {
    if (debugInfoAnnotator == null) {
        return;
    }
    debugInfoAnnotator.setItemIdentity(debugInfoOffset, methodString);
}
};
}
}
| |
package com.onpoint.security;
import com.onpoint.domain.PersistentToken;
import com.onpoint.domain.User;
import com.onpoint.repository.PersistentTokenRepository;
import com.onpoint.repository.UserRepository;
import org.joda.time.LocalDate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.env.Environment;
import org.springframework.dao.DataAccessException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.crypto.codec.Base64;
import org.springframework.security.web.authentication.rememberme.AbstractRememberMeServices;
import org.springframework.security.web.authentication.rememberme.CookieTheftException;
import org.springframework.security.web.authentication.rememberme.InvalidCookieException;
import org.springframework.security.web.authentication.rememberme.RememberMeAuthenticationException;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.security.SecureRandom;
import java.util.Arrays;
/**
* Custom implementation of Spring Security's RememberMeServices.
* <p/>
* Persistent tokens are used by Spring Security to automatically log in users.
* <p/>
* This is a specific implementation of Spring Security's remember-me authentication, but it is much
* more powerful than the standard implementations:
* <ul>
* <li>It allows a user to see the list of his currently opened sessions, and invalidate them</li>
* <li>It stores more information, such as the IP address and the user agent, for audit purposes</li>
* <li>When a user logs out, only his current session is invalidated, and not all of his sessions</li>
* </ul>
* <p/>
* This is inspired by:
* <ul>
* <li><a href="http://jaspan.com/improved_persistent_login_cookie_best_practice">Improved Persistent Login Cookie
* Best Practice</a></li>
* <li><a href="https://github.com/blog/1661-modeling-your-app-s-user-session">Github's "Modeling your App's User Session"</a></li>
* </ul>
* <p/>
* The main algorithm comes from Spring Security's PersistentTokenBasedRememberMeServices, but this class
* couldn't be cleanly extended.
* <p/>
*/
@Service
public class CustomPersistentRememberMeServices extends
AbstractRememberMeServices {
private final Logger log = LoggerFactory.getLogger(CustomPersistentRememberMeServices.class);
// Token is valid for one month
private static final int TOKEN_VALIDITY_DAYS = 31;
private static final int TOKEN_VALIDITY_SECONDS = 60 * 60 * 24 * TOKEN_VALIDITY_DAYS;
private static final int DEFAULT_SERIES_LENGTH = 16;
private static final int DEFAULT_TOKEN_LENGTH = 16;
private SecureRandom random;
@Inject
private PersistentTokenRepository persistentTokenRepository;
@Inject
private UserRepository userRepository;
@Inject
public CustomPersistentRememberMeServices(Environment env, org.springframework.security.core.userdetails.UserDetailsService userDetailsService) {
super(env.getProperty("jhipster.security.rememberme.key"), userDetailsService);
random = new SecureRandom();
}
@Override
@Transactional
protected UserDetails processAutoLoginCookie(String[] cookieTokens, HttpServletRequest request, HttpServletResponse response) {
PersistentToken token = getPersistentToken(cookieTokens);
String login = token.getUser().getLogin();
// Token also matches, so login is valid. Update the token value, keeping the *same* series number.
log.debug("Refreshing persistent login token for user '{}', series '{}'", login, token.getSeries());
token.setTokenDate(new LocalDate());
token.setTokenValue(generateTokenData());
token.setIpAddress(request.getRemoteAddr());
token.setUserAgent(request.getHeader("User-Agent"));
try {
persistentTokenRepository.saveAndFlush(token);
addCookie(token, request, response);
} catch (DataAccessException e) {
log.error("Failed to update token: ", e);
throw new RememberMeAuthenticationException("Autologin failed due to data access problem", e);
}
return getUserDetailsService().loadUserByUsername(login);
}
@Override
protected void onLoginSuccess(HttpServletRequest request, HttpServletResponse response, Authentication successfulAuthentication) {
String login = successfulAuthentication.getName();
log.debug("Creating new persistent login for user {}", login);
PersistentToken token = userRepository.findOneByLogin(login).map(u -> {
PersistentToken t = new PersistentToken();
t.setSeries(generateSeriesData());
t.setUser(u);
t.setTokenValue(generateTokenData());
t.setTokenDate(new LocalDate());
t.setIpAddress(request.getRemoteAddr());
t.setUserAgent(request.getHeader("User-Agent"));
return t;
}).orElseThrow(() -> new UsernameNotFoundException("User " + login + " was not found in the database"));
try {
persistentTokenRepository.saveAndFlush(token);
addCookie(token, request, response);
} catch (DataAccessException e) {
log.error("Failed to save persistent token ", e);
}
}
/**
* When logout occurs, only invalidate the current token, and not all user sessions.
* <p/>
* The standard Spring Security implementations are too basic: they invalidate all tokens for the
* current user, so when he logs out from one browser, all his other sessions are destroyed.
*/
@Override
@Transactional
public void logout(HttpServletRequest request, HttpServletResponse response, Authentication authentication) {
String rememberMeCookie = extractRememberMeCookie(request);
if (rememberMeCookie != null && rememberMeCookie.length() != 0) {
try {
String[] cookieTokens = decodeCookie(rememberMeCookie);
PersistentToken token = getPersistentToken(cookieTokens);
persistentTokenRepository.delete(token);
} catch (InvalidCookieException ice) {
log.info("Invalid cookie, no persistent token could be deleted");
} catch (RememberMeAuthenticationException rmae) {
log.debug("No persistent token found, so no token could be deleted");
}
}
super.logout(request, response, authentication);
}
/**
* Validate the token and return it.
*/
private PersistentToken getPersistentToken(String[] cookieTokens) {
if (cookieTokens.length != 2) {
throw new InvalidCookieException("Cookie token did not contain " + 2 +
" tokens, but contained '" + Arrays.asList(cookieTokens) + "'");
}
String presentedSeries = cookieTokens[0];
String presentedToken = cookieTokens[1];
PersistentToken token = persistentTokenRepository.findOne(presentedSeries);
if (token == null) {
// No series match, so we can't authenticate using this cookie
throw new RememberMeAuthenticationException("No persistent token found for series id: " + presentedSeries);
}
// We have a match for this user/series combination
log.info("presentedToken={} / tokenValue={}", presentedToken, token.getTokenValue());
if (!presentedToken.equals(token.getTokenValue())) {
// Token doesn't match series value. Delete this session and throw an exception.
persistentTokenRepository.delete(token);
throw new CookieTheftException("Invalid remember-me token (Series/token) mismatch. Implies previous cookie theft attack.");
}
if (token.getTokenDate().plusDays(TOKEN_VALIDITY_DAYS).isBefore(LocalDate.now())) {
persistentTokenRepository.delete(token);
throw new RememberMeAuthenticationException("Remember-me login has expired");
}
return token;
}
private String generateSeriesData() {
byte[] newSeries = new byte[DEFAULT_SERIES_LENGTH];
random.nextBytes(newSeries);
return new String(Base64.encode(newSeries));
}
private String generateTokenData() {
byte[] newToken = new byte[DEFAULT_TOKEN_LENGTH];
random.nextBytes(newToken);
return new String(Base64.encode(newToken));
}
private void addCookie(PersistentToken token, HttpServletRequest request, HttpServletResponse response) {
setCookie(
new String[]{token.getSeries(), token.getTokenValue()},
TOKEN_VALIDITY_SECONDS, request, response);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.markup.html.form;
import java.util.Collection;
import java.util.List;
import org.apache.wicket.Page;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.model.IModel;
import org.apache.wicket.util.convert.IConverter;
import org.apache.wicket.util.string.AppendingStringBuffer;
import org.apache.wicket.util.string.Strings;
/**
* A choice subclass that shows choices via checkboxes.
* <p>
* Java:
*
* <pre>
* List SITES = Arrays.asList(new String[] { "The Server Side", "Java Lobby", "Java.Net" });
* // Add a set of checkboxes uses Input's 'site' property to designate the
* // current selections, and that uses the SITES list for the available options.
* form.add(new CheckBoxMultipleChoice("site", SITES));
* </pre>
*
* HTML:
*
* <pre>
* <span valign="top" wicket:id="site">
* <input type="checkbox">site 1</input>
* <input type="checkbox">site 2</input>
* </span>
* </pre>
*
* </p>
*
* @author Jonathan Locke
* @author Johan Compagner
* @author Martijn Dashorst
* @author Gwyn Evans
* @author Igor Vaynberg (ivaynberg)
*
* @param <T>
* The model object type
*/
public class CheckBoxMultipleChoice<T> extends ListMultipleChoice<T>
{
private static final long serialVersionUID = 1L;
// Markup written out before each checkbox; empty by default.
private String prefix = "";
// Markup written out after each checkbox; a line break by default.
private String suffix = "<br/>\n";
/**
* Constructor
*
* @param id
* See Component
* @see org.apache.wicket.Component#Component(String)
* @see AbstractChoice#AbstractChoice(String)
*/
public CheckBoxMultipleChoice(final String id)
{
super(id);
}
/**
* Constructor
*
* @param id
* See Component
* @param choices
* The collection of choices in the radio choice
* @see org.apache.wicket.Component#Component(String)
* @see AbstractChoice#AbstractChoice(String, java.util.List)
*/
public CheckBoxMultipleChoice(final String id, final List<? extends T> choices)
{
super(id, choices);
}
/**
* Constructor
*
* @param id
* See Component
* @param renderer
* The rendering engine
* @param choices
* The collection of choices in the radio choice
* @see org.apache.wicket.Component#Component(String)
* @see AbstractChoice#AbstractChoice(String,
* java.util.List,org.apache.wicket.markup.html.form.IChoiceRenderer)
*/
public CheckBoxMultipleChoice(final String id, final List<? extends T> choices,
final IChoiceRenderer<? super T> renderer)
{
super(id, choices, renderer);
}
/**
* Constructor
*
* @param id
* See Component
* @param model
* See Component
* @param choices
* The collection of choices in the radio choice
* @see org.apache.wicket.Component#Component(String, org.apache.wicket.model.IModel)
* @see AbstractChoice#AbstractChoice(String, org.apache.wicket.model.IModel, java.util.List)
*/
public CheckBoxMultipleChoice(final String id, IModel<? extends Collection<T>> model,
final List<? extends T> choices)
{
super(id, model, choices);
}
/**
* Constructor
*
* @param id
* See Component
* @param model
* See Component
* @param choices
* The collection of choices in the radio choice
* @param renderer
* The rendering engine
* @see org.apache.wicket.Component#Component(String, org.apache.wicket.model.IModel)
* @see AbstractChoice#AbstractChoice(String, org.apache.wicket.model.IModel,
* java.util.List,org.apache.wicket.markup.html.form.IChoiceRenderer)
*/
public CheckBoxMultipleChoice(final String id, IModel<? extends Collection<T>> model,
final List<? extends T> choices, final IChoiceRenderer<? super T> renderer)
{
super(id, model, choices, renderer);
}
/**
* Constructor
*
* @param id
* See Component
* @param choices
* The collection of choices in the radio choice
* @see org.apache.wicket.Component#Component(String)
* @see AbstractChoice#AbstractChoice(String, org.apache.wicket.model.IModel)
*/
public CheckBoxMultipleChoice(String id, IModel<? extends List<? extends T>> choices)
{
super(id, choices);
}
/**
* Constructor
*
* @param id
* See Component
* @param model
* The model that is updated with changes in this component. See Component
* @param choices
* The collection of choices in the radio choice
* @see AbstractChoice#AbstractChoice(String,
* org.apache.wicket.model.IModel,org.apache.wicket.model.IModel)
* @see org.apache.wicket.Component#Component(String, org.apache.wicket.model.IModel)
*/
public CheckBoxMultipleChoice(String id, IModel<? extends Collection<T>> model,
IModel<? extends List<? extends T>> choices)
{
super(id, model, choices);
}
/**
* Constructor
*
* @param id
* See Component
* @param choices
* The collection of choices in the radio choice
* @param renderer
* The rendering engine
* @see AbstractChoice#AbstractChoice(String,
* org.apache.wicket.model.IModel,org.apache.wicket.markup.html.form.IChoiceRenderer)
* @see org.apache.wicket.Component#Component(String)
*/
public CheckBoxMultipleChoice(String id, IModel<? extends List<? extends T>> choices,
IChoiceRenderer<? super T> renderer)
{
super(id, choices, renderer);
}
/**
* Constructor
*
* @param id
* See Component
* @param model
* The model that is updated with changes in this component. See Component
* @param choices
* The collection of choices in the radio choice
* @param renderer
* The rendering engine
* @see org.apache.wicket.Component#Component(String, org.apache.wicket.model.IModel)
* @see AbstractChoice#AbstractChoice(String, org.apache.wicket.model.IModel,
* org.apache.wicket.model.IModel,org.apache.wicket.markup.html.form.IChoiceRenderer)
*/
public CheckBoxMultipleChoice(String id, IModel<? extends Collection<T>> model,
IModel<? extends List<? extends T>> choices, IChoiceRenderer<? super T> renderer)
{
super(id, model, choices, renderer);
}
/**
* @return Prefix to use before choice
*/
public String getPrefix()
{
return prefix;
}
/**
* @param index
* index of the choice
* @param choice
* the choice itself
* @return Prefix to use before choice. The default implementation just returns
* {@link #getPrefix()}. Override to have a prefix dependent on the choice item.
*/
protected String getPrefix(int index, T choice)
{
return getPrefix();
}
/**
* @param index
* index of the choice
* @param choice
* the choice itself
* @return Separator to use between radio options. The default implementation just returns
* {@link #getSuffix()}. Override to have a suffix dependent on the choice item.
*/
protected String getSuffix(int index, T choice)
{
return getSuffix();
}
/**
* @param prefix
* Prefix to use before choice
* @return this
*/
public final CheckBoxMultipleChoice<T> setPrefix(final String prefix)
{
// Tell the page that this component's prefix was changed
final Page page = findPage();
if (page != null)
{
addStateChange();
}
this.prefix = prefix;
return this;
}
/**
* @return Separator to use between radio options
*/
public String getSuffix()
{
return suffix;
}
/**
* @param suffix
* Separator to use between radio options
* @return this
*/
public final CheckBoxMultipleChoice<T> setSuffix(final String suffix)
{
// Tell the page that this component's suffix was changed
final Page page = findPage();
if (page != null)
{
addStateChange();
}
this.suffix = suffix;
return this;
}
/**
* Strips select-specific attributes inherited from the list-choice rendering, since this
* component renders individual checkbox inputs instead of a single select element.
*
* @see org.apache.wicket.markup.html.form.ListMultipleChoice#onComponentTag(org.apache.wicket.markup.ComponentTag)
*/
@Override
protected void onComponentTag(ComponentTag tag)
{
super.onComponentTag(tag);
// No longer applicable, breaks XHTML validation.
tag.remove("multiple");
tag.remove("size");
tag.remove("disabled");
tag.remove("name");
}
/**
* Replaces the tag body with one generated checkbox + label per choice.
*
* @see org.apache.wicket.Component#onComponentTagBody(org.apache.wicket.markup.MarkupStream,
* org.apache.wicket.markup.ComponentTag)
*/
@Override
public final void onComponentTagBody(final MarkupStream markupStream, final ComponentTag openTag)
{
// Iterate through choices
final List<? extends T> choices = getChoices();
// Buffer to hold generated body; presized at roughly 70 chars per option
final AppendingStringBuffer buffer = new AppendingStringBuffer(70 * (choices.size() + 1));
// Value of this choice
final String selected = getValue();
// Loop through choices
for (int index = 0; index < choices.size(); index++)
{
// Get next choice
final T choice = choices.get(index);
appendOptionHtml(buffer, choice, index, selected);
}
// Replace body
replaceComponentTagBody(markupStream, openTag, buffer);
}
/**
* Generates and appends html for a single choice into the provided buffer
*
* @param buffer
* Appending string buffer that will have the generated html appended
* @param choice
* Choice object
* @param index
* The index of this option
* @param selected
* The currently selected string value
*/
@SuppressWarnings("unchecked")
@Override
protected void appendOptionHtml(final AppendingStringBuffer buffer, final T choice, int index,
final String selected)
{
Object displayValue = getChoiceRenderer().getDisplayValue(choice);
Class<?> objectClass = displayValue == null ? null : displayValue.getClass();
// Get label for choice; non-String display values go through the registered converter
String label = "";
if (objectClass != null && objectClass != String.class)
{
@SuppressWarnings("rawtypes")
IConverter converter = getConverter(objectClass);
label = converter.convertToString(displayValue, getLocale());
}
else if (displayValue != null)
{
label = displayValue.toString();
}
// If there is a display value for the choice, then we know that the
// choice is automatic in some way. If label is /null/ then we know
// that the choice is a manually created checkbox tag at some random
// location in the page markup!
if (label != null)
{
// Append option prefix
buffer.append(getPrefix(index, choice));
String id = getChoiceRenderer().getIdValue(choice, index);
final String idAttr = getCheckBoxMarkupId(id);
// Add checkbox element
buffer.append("<input name=\"");
buffer.append(getInputName());
buffer.append("\"");
buffer.append(" type=\"checkbox\"");
if (isSelected(choice, index, selected))
{
buffer.append(" checked=\"checked\"");
}
if (isDisabled(choice, index, selected) || !isEnabledInHierarchy())
{
buffer.append(" disabled=\"disabled\"");
}
buffer.append(" value=\"");
buffer.append(id);
buffer.append("\" id=\"");
buffer.append(idAttr);
buffer.append("\"/>");
// Add label for checkbox; display text is localized and escaped when configured
String display = label;
if (localizeDisplayValues())
{
display = getLocalizer().getString(label, this, label);
}
final CharSequence escaped = (getEscapeModelStrings() ? Strings.escapeMarkup(display)
: display);
buffer.append("<label for=\"");
buffer.append(idAttr);
buffer.append("\">").append(escaped).append("</label>");
// Append option suffix
buffer.append(getSuffix(index, choice));
}
}
/**
* Creates markup id for the input tag used to generate the checkbox for the element with the
* specified {@code id}.
* <p>
* NOTE It is useful to override this method if the contract for the generated ids should be
* fixed, for example in cases when the id generation pattern in this method is used to predict
* ids by some external javascript. If the contract is fixed in the user's code then upgrading
* wicket versions will guarantee not to break it should the default contract be changed at a
* later time.
* </p>
*
* @param id
* @return markup id for the input tag
*/
protected String getCheckBoxMarkupId(String id)
{
return getMarkupId() + "-" + getInputName() + "_" + id;
}
}
| |
/*******************************************************************************
* Copyright 2015 Ivan Shubin http://mindengine.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.mindengine.galen.tests.runner;
import static java.util.Arrays.asList;
import static org.apache.commons.io.FileUtils.readFileToString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.is;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.Field;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import junit.framework.Assert;
import net.mindengine.galen.components.report.FakeException;
import net.mindengine.galen.components.report.ReportingListenerTestUtils;
import net.mindengine.galen.reports.*;
import net.mindengine.galen.reports.json.JsonReportBuilder;
import net.mindengine.galen.reports.model.FileTempStorage;
import net.mindengine.galen.reports.model.LayoutReport;
import net.mindengine.galen.reports.nodes.LayoutReportNode;
import net.mindengine.galen.reports.LayoutReportListener;
import net.mindengine.galen.tests.GalenEmptyTest;
import org.apache.commons.io.IOUtils;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.google.common.io.Files;
import freemarker.template.TemplateException;
@Test(singleThreaded=true)
public class ReportingTest {
private static final String GALEN_LOG_LEVEL = "galen.log.level";
@AfterMethod public void removeAllSystemProperties() {
System.getProperties().remove(GALEN_LOG_LEVEL);
}
@BeforeMethod
public void resetUniqueId() throws NoSuchFieldException, IllegalAccessException {
resetUniqueIdForFileTempStorage();
}
@Test
public void shouldReportWithEmptyScreenshot_inJsonFormat() throws Exception {
String reportPath = Files.createTempDir().getAbsolutePath() + "/json-report";
List<GalenTestInfo> testInfos = new LinkedList<GalenTestInfo>();
GalenTestInfo testInfo = new GalenTestInfo("Home page test", new GalenEmptyTest("Home page test", asList("mobile", "HOMEPAGE")));
TestReport report = new TestReport();
LayoutReport layoutReport = new LayoutReport();
layoutReport.setScreenshot(null);
ReportingListenerTestUtils.performSampleReporting("Home page test", null, new LayoutReportListener(layoutReport), null);
report.info("Just a simple info node with attachment")
.withAttachment("some-file.txt", File.createTempFile("some-file", ".txt"))
.setTime(new Date(1404681346001L));
report.addNode(new LayoutReportNode(report.getFileStorage(), layoutReport, "check layout"))
.setTime(new Date(1404681346002L));
testInfo.setReport(report);
testInfos.add(testInfo);
testInfo.setStartedAt(new Date(1404681346000L));
testInfo.setEndedAt(new Date(1404681416000L));
new JsonReportBuilder().build(testInfos, reportPath);
assertJsonFileContents("Report overview", reportPath + "/report.json", "/expected-reports/json/report.json");
assertJsonFileContents("Test report", reportPath + "/1-home-page-test.json", "/expected-reports/json/2-home-page-test.json");
// Check that all files from storage were saved in report folder
assertThat("Report folder contains files", asList(new File(reportPath).list()), containsInAnyOrder(
"1-home-page-test.json",
"file-4-some-file.txt",
"layout-1-objectB1-actual.png",
"layout-2-objectB1-expected.png",
"layout-3-objectB1-map.png",
"report.json"
));
}
@Test
public void shouldReport_inJsonFormat() throws Exception {
String reportPath = Files.createTempDir().getAbsolutePath() + "/json-report";
List<GalenTestInfo> testInfos = new LinkedList<GalenTestInfo>();
GalenTestInfo testInfo = new GalenTestInfo("Home page test", new GalenEmptyTest("Home page test", asList("mobile", "HOMEPAGE")));
TestReport report = new TestReport();
LayoutReport layoutReport = new LayoutReport();
layoutReport.setScreenshot(layoutReport.getFileStorage().registerFile("screenshot.png", File.createTempFile("screenshot", ".png")));
ReportingListenerTestUtils.performSampleReporting("Home page test", null, new LayoutReportListener(layoutReport), null);
report.info("Just a simple info node with attachment")
.withAttachment("some-file.txt", File.createTempFile("some-file", ".txt"))
.setTime(new Date(1404681346001L));
report.addNode(new LayoutReportNode(report.getFileStorage(), layoutReport, "check layout"))
.setTime(new Date(1404681346002L));
testInfo.setReport(report);
testInfos.add(testInfo);
testInfo.setStartedAt(new Date(1404681346000L));
testInfo.setEndedAt(new Date(1404681416000L));
new JsonReportBuilder().build(testInfos, reportPath);
assertJsonFileContents("Report overview", reportPath + "/report.json", "/expected-reports/json/report.json");
assertJsonFileContents("Test report", reportPath + "/1-home-page-test.json", "/expected-reports/json/1-home-page-test.json");
// Check that all files from storage were saved in report folder
assertThat("Report folder contains files", asList(new File(reportPath).list()), containsInAnyOrder(
"1-home-page-test.json",
"file-5-some-file.txt",
"layout-1-screenshot.png",
"layout-2-objectB1-actual.png",
"layout-3-objectB1-expected.png",
"layout-4-objectB1-map.png",
"report.json"
));
}
private void resetUniqueIdForFileTempStorage() throws NoSuchFieldException, IllegalAccessException {
Field _uniqueIdField = FileTempStorage.class.getDeclaredField("_uniqueId");
_uniqueIdField.setAccessible(true);
_uniqueIdField.set(null, 0L);
}
@Test public void shouldReport_inTestNgFormat_successfully() throws IOException, TemplateException {
String reportPath = Files.createTempDir().getAbsolutePath() + "/testng-report/report.xml";
List<GalenTestInfo> testInfos = new LinkedList<GalenTestInfo>();
GalenTestInfo testInfo = new GalenTestInfo("Home page test", null);
testInfo.setReport(new TestReport());
testInfo.setStartedAt(new Date(1399741000000L));
testInfo.setEndedAt(new Date(1399746930000L));
testInfo.setException(new FakeException("Some exception here"));
testInfos.add(testInfo);
testInfo = new GalenTestInfo("Login page test", null);
testInfo.setReport(new TestReport());
testInfo.setStartedAt(new Date(1399741000000L));
testInfo.setEndedAt(new Date(1399746930000L));
testInfos.add(testInfo);
new TestNgReportBuilder().build(testInfos, reportPath);
String expectedXml = IOUtils.toString(getClass().getResourceAsStream("/expected-reports/testng-report.xml"));
String realXml = readFileToString(new File(reportPath));
Assert.assertEquals(trimEveryLine(expectedXml), trimEveryLine(realXml));
}
@Test public void shouldReport_inHtmlFormat_withException_andAttachments() throws IOException, TemplateException {
String reportDirPath = Files.createTempDir().getAbsolutePath() + "/reports";
List<GalenTestInfo> testInfos = new LinkedList<GalenTestInfo>();
GalenTestInfo testInfo = new GalenTestInfo("Home page test", null);
testInfo.setStartedAt(new Date(1399741000000L));
testInfo.setEndedAt(new Date(1399746930000L));
File attachmentFile = new File(Files.createTempDir().getAbsolutePath() + File.separator + "custom.txt");
attachmentFile.createNewFile();
testInfo.getReport().error(new FakeException("Some exception here")).withAttachment("custom.txt", attachmentFile);
testInfo.getReport().info("Some detailed report").withDetails("Some details");
testInfo.getReport().getNodes().get(0).setTime(new Date(1399741000000L));
testInfo.getReport().getNodes().get(1).setTime(new Date(1399741000000L));
testInfos.add(testInfo);
new HtmlReportBuilder().build(testInfos, reportDirPath);
assertThat("Should place attachment file in same folder", new File(reportDirPath + "/file-1-custom.txt").exists(), is(true));
}
@Test public void shouldReport_inHtmlWithJsonFormat_successfully_andSplitFiles_perTest() throws IOException, TemplateException {
String reportDirPath = Files.createTempDir().getAbsolutePath() + "/reports";
List<GalenTestInfo> testInfos = new LinkedList<GalenTestInfo>();
GalenTestInfo testInfo = new GalenTestInfo("Home page test", null);
TestReport report = new TestReport();
LayoutReport layoutReport = new LayoutReport();
layoutReport.setScreenshot(layoutReport.getFileStorage().registerFile("screenshot.png", File.createTempFile("screenshot", ".png")));
ReportingListenerTestUtils.performSampleReporting("Home page test", null, new LayoutReportListener(layoutReport), null);
report.info("Just a simple info node with attachment")
.withAttachment("some-file.txt", File.createTempFile("some-file", ".txt"))
.setTime(new Date(1404681346001L));
report.addNode(new LayoutReportNode(report.getFileStorage(), layoutReport, "check layout"))
.setTime(new Date(1404681346002L));
testInfo.setReport(report);
testInfos.add(testInfo);
testInfo.setStartedAt(new Date(1404681346000L));
testInfo.setEndedAt(new Date(1404681416000L));
new HtmlReportBuilder().build(testInfos, reportDirPath);
assertThat("Report folder contains files", asList(new File(reportDirPath).list()), containsInAnyOrder(
"1-home-page-test.html",
"1-home-page-test.json",
"file-5-some-file.txt",
"layout-1-screenshot.png",
"layout-2-objectB1-actual.png",
"layout-3-objectB1-expected.png",
"layout-4-objectB1-map.png",
"report.html",
"report.json",
"handlebars-v2.0.0.js",
"galen-report.js",
"report.css",
"icon-sprites.png",
"jquery-1.11.2.min.js",
"tablesorter.js",
"tablesorter.css"
));
}
/**
 * Trims leading and trailing whitespace from every line of the given text.
 * Line endings are normalized: each line (including the last one) is terminated
 * with a single '\n' in the result.
 *
 * @param text multi-line text, split on "\r\n" or "\n"
 * @return the text with each line trimmed, joined with '\n'
 */
private String trimEveryLine(String text) {
    // Idiomatic Java array declaration (was C-style "String lines[]").
    String[] lines = text.split("\\r?\\n");
    StringBuilder builder = new StringBuilder();
    for (String line : lines) {
        builder.append(line.trim()).append('\n');
    }
    return builder.toString();
}
// Default log level: the console output should match the full expected report.
@Test public void shouldReport_toConsole_successfully() throws IOException {
performConsoleReporting_andCompare("/expected-reports/console.txt");
}
/**
 * At log level 1 the console report should be limited to suite-level output only.
 */
@Test public void shouldReport_toConsole_onlySuites_whenLogLevel_is_1() throws IOException {
    System.setProperty(GALEN_LOG_LEVEL, "1");
    try {
        performConsoleReporting_andCompare("/expected-reports/console-1.txt");
    } finally {
        // Reset the global property so the lowered log level cannot leak into other tests.
        System.clearProperty(GALEN_LOG_LEVEL);
    }
}
/**
 * At log level 2 the console report should contain suite- and page-level output.
 */
@Test public void shouldReport_toConsole_onlySuites_andPages_whenLogLevel_is_2() throws IOException {
    System.setProperty(GALEN_LOG_LEVEL, "2");
    try {
        performConsoleReporting_andCompare("/expected-reports/console-2.txt");
    } finally {
        // Reset the global property so the log level cannot leak into other tests.
        System.clearProperty(GALEN_LOG_LEVEL);
    }
}
/**
 * Runs the sample reporting through a {@link ConsoleReportingListener} that writes to an
 * in-memory stream and compares the captured console output against the expected
 * classpath resource.
 *
 * @param expectedReport classpath resource path containing the expected console output
 * @throws IOException if the expected resource cannot be read
 */
private void performConsoleReporting_andCompare(String expectedReport) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintStream ps = new PrintStream(baos);
    try {
        ConsoleReportingListener listener = new ConsoleReportingListener(ps, ps);
        ReportingListenerTestUtils.performSampleReporting("page1.test", listener, listener, listener);
        listener.done();
    } finally {
        // Flush and release the stream before reading the captured bytes.
        ps.close();
    }
    // NOTE(review): "\\t " is a literal backslash-t-space sequence in the resource files,
    // replaced with a real tab before comparing — confirm the expected-report resources rely on this.
    String expectedText = IOUtils.toString(getClass().getResourceAsStream(expectedReport)).replace("\\t ", "\t");
    Assert.assertEquals(expectedText, baos.toString("UTF-8"));
}
/**
 * Parses both files as JSON trees and asserts they are equal, so that formatting
 * and key-ordering differences do not affect the comparison.
 */
private void assertJsonFileContents(String title, String actualPath, String expectedPath) throws IOException {
    ObjectMapper mapper = new ObjectMapper();
    JsonNode actualTree = mapper.readTree(readFileToString(new File(actualPath)));
    JsonNode expectedTree = mapper.readTree(readFileToString(new File(getClass().getResource(expectedPath).getFile())));
    assertThat(title + " content should be", actualTree, is(expectedTree));
}
}
| |
/****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.jspf.core;
import org.apache.james.jspf.ConsoleLogger;
import org.apache.james.jspf.core.MacroData;
import org.apache.james.jspf.core.MacroExpand;
import org.apache.james.jspf.core.exceptions.PermErrorException;
import junit.framework.TestCase;
/**
* RFC4408 8.2. Expansion Examples
*/
/**
 * Tests SPF macro expansion against the expansion examples of RFC 4408 section 8.2.
 */
public class MacroExpandTest extends TestCase {

    /**
     * IPv6 variant of the RFC 4408 8.2 sample data; only the address-related
     * values differ from the IPv4 data.
     * (Renamed from rfcIP6MacroData to follow Java type-naming conventions.)
     */
    private final class RfcIp6MacroData extends RfcIp4MacroData {
        public String getInAddress() {
            return "ip6";
        }

        public String getMacroIpAddress() {
            return "2.0.0.1.0.D.B.8.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.C.B.0.1";
        }

        public String getReadableIP() {
            return "2001:DB8::CB01";
        }
    }

    /**
     * Sample macro data from RFC 4408 8.2: sender strong-bad@email.example.com,
     * client IP 192.0.2.3.
     * (Renamed from rfcIP4MacroData to follow Java type-naming conventions.)
     */
    private class RfcIp4MacroData implements MacroData {
        public String getCurrentSenderPart() {
            return "strong-bad";
        }

        public String getMailFrom() {
            return "strong-bad@email.example.com";
        }

        public String getHostName() {
            return "email.example.com";
        }

        public String getCurrentDomain() {
            return "email.example.com";
        }

        public String getInAddress() {
            return "in-addr";
        }

        public String getClientDomain() {
            return "clientdomain";
        }

        public String getSenderDomain() {
            return "email.example.com";
        }

        public String getMacroIpAddress() {
            return "192.0.2.3";
        }

        public long getTimeStamp() {
            return System.currentTimeMillis();
        }

        public String getReadableIP() {
            return "192.0.2.3";
        }

        public String getReceivingDomain() {
            return "receivingdomain";
        }
    }

    MacroExpand defIp4me = null;

    MacroExpand defIp6me = null;

    protected void setUp() throws Exception {
        super.setUp();
        defIp4me = new MacroExpand(new ConsoleLogger(), null);
        defIp6me = new MacroExpand(new ConsoleLogger(), null);
    }

    /** %{s} expands to the full sender address. */
    public void testPercS() throws PermErrorException {
        assertEquals("strong-bad@email.example.com", defIp4me
                .expand("%{s}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    /** %{k} is not a defined macro letter and must be rejected. */
    public void testPercK() throws PermErrorException {
        try {
            defIp4me.expand("%{k}", new RfcIp4MacroData(), MacroExpand.DOMAIN);
            fail("%{k} is not a valid expansion");
        } catch (PermErrorException e) {
            // expected: invalid macro letter
        }
    }

    /** A bare '%' at the end of the string is a syntax error. */
    public void testPercentAloneIsError() throws PermErrorException {
        try {
            defIp4me.expand("%{s}%", new RfcIp4MacroData(), MacroExpand.DOMAIN);
            fail("invalid percent at end of line");
        } catch (PermErrorException e) {
            // expected: dangling percent sign
        }
    }

    /** %% expands to a literal percent sign. */
    public void testDoublePercent() throws PermErrorException {
        assertEquals("%", defIp4me.expand("%%", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    /** %{o} expands to the sender domain. */
    public void testPercO() throws PermErrorException {
        assertEquals("email.example.com", defIp4me.expand("%{o}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    /** %{d} with digit/reverse modifiers selects and reorders domain labels. */
    public void testPercD() throws PermErrorException {
        assertEquals("email.example.com", defIp4me.expand("%{d}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("email.example.com", defIp4me.expand("%{d4}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("email.example.com", defIp4me.expand("%{d3}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("example.com", defIp4me.expand("%{d2}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("com", defIp4me.expand("%{d1}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("com.example.email", defIp4me.expand("%{dr}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("example.email", defIp4me.expand("%{d2r}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    /** %{l} (local part) with delimiter and reverse modifiers. */
    public void testPercL() throws PermErrorException {
        assertEquals("strong-bad", defIp4me.expand("%{l}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("strong.bad", defIp4me.expand("%{l-}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("strong-bad", defIp4me.expand("%{lr}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("bad.strong", defIp4me.expand("%{lr-}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
        assertEquals("strong", defIp4me.expand("%{l1r-}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    // The following tests mirror the combined expansion examples of RFC 4408 8.2.

    public void testExample1() throws PermErrorException {
        assertEquals("3.2.0.192.in-addr._spf.example.com", defIp4me
                .expand("%{ir}.%{v}._spf.%{d2}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    public void testExample2() throws PermErrorException {
        assertEquals("bad.strong.lp._spf.example.com", defIp4me
                .expand("%{lr-}.lp._spf.%{d2}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    public void testExample3() throws PermErrorException {
        assertEquals("bad.strong.lp.3.2.0.192.in-addr._spf.example.com",
                defIp4me.expand("%{lr-}.lp.%{ir}.%{v}._spf.%{d2}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    public void testExample4() throws PermErrorException {
        assertEquals("3.2.0.192.in-addr.strong.lp._spf.example.com", defIp4me
                .expand("%{ir}.%{v}.%{l1r-}.lp._spf.%{d2}", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    public void testExample5() throws PermErrorException {
        assertEquals("example.com.trusted-domains.example.net", defIp4me
                .expand("%{d2}.trusted-domains.example.net", new RfcIp4MacroData(), MacroExpand.DOMAIN));
    }

    public void testExample6_ipv6() throws PermErrorException {
        assertEquals(
                "1.0.B.C.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.B.D.0.1.0.0.2.ip6._spf.example.com",
                defIp6me.expand("%{ir}.%{v}._spf.%{d2}", new RfcIp6MacroData(), MacroExpand.EXPLANATION));
    }

    /** Special characters in the local part must pass through expansion unchanged. */
    public void testLocalPartWithSpecialChars() throws PermErrorException {
        assertEquals(
                "+exists:CL.192.0.2.3.FR.test{$LNAME}@email.example.com.spf.test.com",
                defIp4me.expand("+exists:CL.%{i}.FR.%{s}.spf.test.com",
                        new RfcIp4MacroData() {
                            public String getMailFrom() {
                                return "test{$LNAME}@email.example.com";
                            }

                            public String getCurrentSenderPart() {
                                return "test{$LNAME}";
                            }
                        }, MacroExpand.DOMAIN));

        // not sure if \ is allowed in email, but anyway make sure we correctly handle also backslash.
        assertEquals(
                "+exists:CL.192.0.2.3.FR.tes\\t{$LNAME}@email.example.com.spf.test.com",
                defIp4me.expand("+exists:CL.%{i}.FR.%{s}.spf.test.com",
                        new RfcIp4MacroData() {
                            public String getMailFrom() {
                                return "tes\\t{$LNAME}@email.example.com";
                            }

                            public String getCurrentSenderPart() {
                                return "tes\\t{$LNAME}";
                            }
                        }, MacroExpand.DOMAIN));
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.Diffable;
import org.elasticsearch.cluster.DiffableUtils;
import org.elasticsearch.cluster.InternalClusterInfoService;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.cluster.service.InternalClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.FromXContentBuilder;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.store.IndexStoreConfig;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.indices.ttl.IndicesTTLService;
import org.elasticsearch.ingest.IngestMetadata;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import static java.util.Collections.unmodifiableSet;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
import static org.elasticsearch.common.util.set.Sets.newHashSet;
public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, FromXContentBuilder<MetaData>, ToXContent {
public static final MetaData PROTO = builder().build();
public static final String ALL = "_all";
/**
* Controls in which contexts a {@link Custom} metadata section is serialized to x-content.
*/
public enum XContentContext {
/* Custom metadata should be returned as part of API call */
API,
/* Custom metadata should be stored as part of the persistent cluster state */
GATEWAY,
/* Custom metadata should be stored as part of a snapshot */
SNAPSHOT
}
// Commonly-used context sets for Custom#context() implementations.
public static EnumSet<XContentContext> API_ONLY = EnumSet.of(XContentContext.API);
public static EnumSet<XContentContext> API_AND_GATEWAY = EnumSet.of(XContentContext.API, XContentContext.GATEWAY);
public static EnumSet<XContentContext> API_AND_SNAPSHOT = EnumSet.of(XContentContext.API, XContentContext.SNAPSHOT);
/**
* A pluggable custom metadata section carried inside {@link MetaData}. Implementations
* are registered as prototypes (see registerPrototype) and re-created by type.
*/
public interface Custom extends Diffable<Custom>, ToXContent {
// Unique type key under which this section is registered and serialized.
String type();
Custom fromXContent(XContentParser parser) throws IOException;
// Contexts (API / GATEWAY / SNAPSHOT) in which this section should be serialized.
EnumSet<XContentContext> context();
}
// Registry of custom metadata prototypes keyed by type; used to re-create customs by type
// when reading cluster state. NOTE(review): plain HashMap mutated by registerPrototype() —
// presumably all registration happens during static init / plugin loading before any
// concurrent reads; confirm no runtime registration occurs.
public static Map<String, Custom> customPrototypes = new HashMap<>();
static {
// register non plugin custom metadata
registerPrototype(RepositoriesMetaData.TYPE, RepositoriesMetaData.PROTO);
registerPrototype(IngestMetadata.TYPE, IngestMetadata.PROTO);
}
/**
* Register a custom index meta data factory. Make sure to call it from a static block.
*/
public static void registerPrototype(String type, Custom proto) {
customPrototypes.put(type, proto);
}
/**
* Returns the registered prototype for the given type, or {@code null} if none is registered.
*/
@Nullable
public static <T extends Custom> T lookupPrototype(String type) {
//noinspection unchecked
return (T) customPrototypes.get(type);
}
/**
* Like lookupPrototype but throws if no prototype is registered, which typically means
* this node is missing the plugin that provides the custom type.
*/
public static <T extends Custom> T lookupPrototypeSafe(String type) {
//noinspection unchecked
T proto = (T) customPrototypes.get(type);
if (proto == null) {
throw new IllegalArgumentException("No custom metadata prototype registered for type [" + type + "], node likely missing plugins");
}
return proto;
}
public static final Setting<Boolean> SETTING_READ_ONLY_SETTING = Setting.boolSetting("cluster.blocks.read_only", false, true, Setting.Scope.CLUSTER);
public static final ClusterBlock CLUSTER_READ_ONLY_BLOCK = new ClusterBlock(6, "cluster read-only (api)", false, false, RestStatus.FORBIDDEN, EnumSet.of(ClusterBlockLevel.WRITE, ClusterBlockLevel.METADATA_WRITE));
public static final MetaData EMPTY_META_DATA = builder().build();
public static final String CONTEXT_MODE_PARAM = "context_mode";
public static final String CONTEXT_MODE_SNAPSHOT = XContentContext.SNAPSHOT.toString();
public static final String CONTEXT_MODE_GATEWAY = XContentContext.GATEWAY.toString();
private final String clusterUUID;
private final long version;
private final Settings transientSettings;
private final Settings persistentSettings;
private final Settings settings;
private final ImmutableOpenMap<String, IndexMetaData> indices;
private final ImmutableOpenMap<String, IndexTemplateMetaData> templates;
private final ImmutableOpenMap<String, Custom> customs;
private final transient int totalNumberOfShards; // Transient ? not serializable anyway?
private final int numberOfShards;
private final String[] allIndices;
private final String[] allOpenIndices;
private final String[] allClosedIndices;
private final SortedMap<String, AliasOrIndex> aliasAndIndexLookup;
@SuppressWarnings("unchecked")
MetaData(String clusterUUID, long version, Settings transientSettings, Settings persistentSettings, ImmutableOpenMap<String, IndexMetaData> indices, ImmutableOpenMap<String, IndexTemplateMetaData> templates, ImmutableOpenMap<String, Custom> customs, String[] allIndices, String[] allOpenIndices, String[] allClosedIndices, SortedMap<String, AliasOrIndex> aliasAndIndexLookup) {
this.clusterUUID = clusterUUID;
this.version = version;
this.transientSettings = transientSettings;
this.persistentSettings = persistentSettings;
this.settings = Settings.settingsBuilder().put(persistentSettings).put(transientSettings).build();
this.indices = indices;
this.customs = customs;
this.templates = templates;
int totalNumberOfShards = 0;
int numberOfShards = 0;
for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
totalNumberOfShards += cursor.value.getTotalNumberOfShards();
numberOfShards += cursor.value.getNumberOfShards();
}
this.totalNumberOfShards = totalNumberOfShards;
this.numberOfShards = numberOfShards;
this.allIndices = allIndices;
this.allOpenIndices = allOpenIndices;
this.allClosedIndices = allClosedIndices;
this.aliasAndIndexLookup = aliasAndIndexLookup;
}
// Monotonically increasing metadata version (incremented by the builder on changes).
public long version() {
return this.version;
}
// UUID identifying the cluster this metadata belongs to.
public String clusterUUID() {
return this.clusterUUID;
}
/**
* Returns the merged transient and persistent settings.
*/
public Settings settings() {
return this.settings;
}
// Settings that survive only until a full cluster restart.
public Settings transientSettings() {
return this.transientSettings;
}
// Settings persisted as part of the cluster state.
public Settings persistentSettings() {
return this.persistentSettings;
}
/**
 * Returns true if the given name resolves to an alias (as opposed to being
 * unknown or naming a concrete index).
 */
public boolean hasAlias(String alias) {
    AliasOrIndex entry = getAliasAndIndexLookup().get(alias);
    return entry != null && entry.isAlias();
}
/**
 * Compares alias definitions index-by-index: every index of {@code other} must
 * exist in this metadata with exactly the same aliases.
 */
public boolean equalsAliases(MetaData other) {
    for (ObjectCursor<IndexMetaData> cursor : other.indices().values()) {
        IndexMetaData otherIndex = cursor.value;
        IndexMetaData localIndex = index(otherIndex.getIndex());
        if (localIndex == null || !otherIndex.getAliases().equals(localIndex.getAliases())) {
            return false;
        }
    }
    return true;
}
// Sorted lookup from name to either an alias or a concrete index.
public SortedMap<String, AliasOrIndex> getAliasAndIndexLookup() {
return aliasAndIndexLookup;
}
/**
* Finds the specific index aliases that match with the specified aliases directly or partially via wildcards and
* that point to the specified concrete indices or match partially with the indices via wildcards.
*
* @param aliases The names of the index aliases to find
* @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
* @return the found index aliases grouped by index
*/
/**
 * Finds the index aliases matching the given alias names (wildcards supported,
 * empty array or "_all" match everything) on the given concrete indices.
 *
 * @param aliases         alias names / patterns to match
 * @param concreteIndices concrete indices whose aliases are inspected
 * @return matching aliases grouped by index name, sorted by alias name
 */
public ImmutableOpenMap<String, List<AliasMetaData>> findAliases(final String[] aliases, String[] concreteIndices) {
    assert aliases != null;
    assert concreteIndices != null;
    if (concreteIndices.length == 0) {
        return ImmutableOpenMap.of();
    }
    boolean matchAll = matchAllAliases(aliases);
    ImmutableOpenMap.Builder<String, List<AliasMetaData>> result = ImmutableOpenMap.builder();
    // Only indices that are both requested and present in this metadata can contribute.
    Iterable<String> existing = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
    for (String indexName : existing) {
        IndexMetaData indexMetaData = indices.get(indexName);
        List<AliasMetaData> matched = new ArrayList<>();
        for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
            AliasMetaData aliasMetaData = cursor.value;
            if (matchAll || Regex.simpleMatch(aliases, aliasMetaData.alias())) {
                matched.add(aliasMetaData);
            }
        }
        if (matched.isEmpty()) {
            continue;
        }
        // Sort by alias name so the result order is deterministic.
        CollectionUtil.timSort(matched, new Comparator<AliasMetaData>() {
            @Override
            public int compare(AliasMetaData o1, AliasMetaData o2) {
                return o1.alias().compareTo(o2.alias());
            }
        });
        result.put(indexName, Collections.unmodifiableList(matched));
    }
    return result.build();
}
/**
 * True when the alias selection means "all aliases": either the array is empty
 * or it contains an explicit "_all" entry.
 */
private static boolean matchAllAliases(final String[] aliases) {
    if (aliases.length == 0) {
        return true;
    }
    for (String alias : aliases) {
        if (alias.equals(ALL)) {
            return true;
        }
    }
    return false;
}
/**
 * Checks if at least one of the specified aliases exists in the specified concrete indices. Wildcards are supported in the
 * alias names for partial matches.
 *
 * @param aliases The names of the index aliases to find
 * @param concreteIndices The concrete indexes the index aliases must point to order to be returned.
 * @return whether at least one of the specified aliases exists in one of the specified concrete indices.
 */
public boolean hasAliases(final String[] aliases, String[] concreteIndices) {
    assert aliases != null;
    assert concreteIndices != null;
    if (concreteIndices.length == 0) {
        return false;
    }
    // Only indices that are both requested and present in this metadata can match.
    Iterable<String> intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
    for (String index : intersection) {
        IndexMetaData indexMetaData = indices.get(index);
        for (ObjectCursor<AliasMetaData> cursor : indexMetaData.getAliases().values()) {
            // Short-circuit on the first matching alias; the previous implementation
            // accumulated matches into a per-index list only to test it for emptiness.
            if (Regex.simpleMatch(aliases, cursor.value.alias())) {
                return true;
            }
        }
    }
    return false;
}
/*
* Finds all mappings for types and concrete indices. Types are expanded to
* include all types that match the glob patterns in the types array. Empty
* types array, null or {"_all"} will be expanded to all types available for
* the given indices.
*/
/**
 * Finds mappings for the given concrete indices, filtered by type patterns.
 * An empty, null or {"_all"} types array returns every mapping of each index.
 */
public ImmutableOpenMap<String, ImmutableOpenMap<String, MappingMetaData>> findMappings(String[] concreteIndices, final String[] types) {
    assert types != null;
    assert concreteIndices != null;
    if (concreteIndices.length == 0) {
        return ImmutableOpenMap.of();
    }
    ImmutableOpenMap.Builder<String, ImmutableOpenMap<String, MappingMetaData>> result = ImmutableOpenMap.builder();
    // Restrict the scan to indices that actually exist in this metadata.
    Iterable<String> existing = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys());
    for (String indexName : existing) {
        IndexMetaData indexMetaData = indices.get(indexName);
        if (isAllTypes(types)) {
            // No type filter: expose every mapping of the index as-is.
            result.put(indexName, indexMetaData.getMappings());
            continue;
        }
        ImmutableOpenMap.Builder<String, MappingMetaData> filtered = ImmutableOpenMap.builder();
        for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.getMappings()) {
            if (Regex.simpleMatch(types, cursor.key)) {
                filtered.put(cursor.key, cursor.value);
            }
        }
        if (!filtered.isEmpty()) {
            result.put(indexName, filtered.build());
        }
    }
    return result.build();
}
/**
 * Returns all the concrete indices.
 */
public String[] concreteAllIndices() {
    return allIndices;
}

public String[] getConcreteAllIndices() {
    return concreteAllIndices();
}

/** Returns all concrete indices that are currently open. */
public String[] concreteAllOpenIndices() {
    return allOpenIndices;
}

public String[] getConcreteAllOpenIndices() {
    // Delegate to the canonical accessor, consistent with getConcreteAllIndices().
    return concreteAllOpenIndices();
}

/** Returns all concrete indices that are currently closed. */
public String[] concreteAllClosedIndices() {
    return allClosedIndices;
}

public String[] getConcreteAllClosedIndices() {
    // Delegate to the canonical accessor, consistent with getConcreteAllIndices().
    return concreteAllClosedIndices();
}
/**
* Returns indexing routing for the given index.
*/
// TODO: This can be moved to IndexNameExpressionResolver too, but this means that we will support wildcards and other expressions
// in the index,bulk,update and delete apis.
public String resolveIndexRouting(@Nullable String parent, @Nullable String routing, String aliasOrIndex) {
// No index/alias given: an explicit routing value wins over the parent routing.
if (aliasOrIndex == null) {
if (routing == null) {
return parent;
}
return routing;
}
AliasOrIndex result = getAliasAndIndexLookup().get(aliasOrIndex);
// Unknown name or a concrete index: again, explicit routing wins over parent.
if (result == null || result.isAlias() == false) {
if (routing == null) {
return parent;
}
return routing;
}
AliasOrIndex.Alias alias = (AliasOrIndex.Alias) result;
// A single-document operation cannot target an alias that spans several indices.
if (result.getIndices().size() > 1) {
String[] indexNames = new String[result.getIndices().size()];
int i = 0;
for (IndexMetaData indexMetaData : result.getIndices()) {
indexNames[i++] = indexMetaData.getIndex().getName();
}
throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has more than one index associated with it [" + Arrays.toString(indexNames) + "], can't execute a single index op");
}
AliasMetaData aliasMd = alias.getFirstAliasMetaData();
if (aliasMd.indexRouting() != null) {
// A comma means several routing values, which a single-document op cannot use.
if (aliasMd.indexRouting().indexOf(',') != -1) {
throw new IllegalArgumentException("index/alias [" + aliasOrIndex + "] provided with routing value [" + aliasMd.getIndexRouting() + "] that resolved to several routing values, rejecting operation");
}
// An explicit routing value, if present, must agree with the alias routing.
if (routing != null) {
if (!routing.equals(aliasMd.indexRouting())) {
throw new IllegalArgumentException("Alias [" + aliasOrIndex + "] has index routing associated with it [" + aliasMd.indexRouting() + "], and was provided with routing value [" + routing + "], rejecting operation");
}
}
// Alias routing overrides the parent routing (if any).
return aliasMd.indexRouting();
}
// No alias routing: fall back to the explicit routing value, then to parent.
if (routing == null) {
return parent;
}
return routing;
}
// True if a concrete index with this exact name exists (aliases are not considered).
public boolean hasIndex(String index) {
return indices.containsKey(index);
}
// True if the name resolves to anything in the lookup (alias or concrete index).
public boolean hasConcreteIndex(String index) {
return getAliasAndIndexLookup().containsKey(index);
}
// Returns the metadata of the named index, or null if it does not exist.
public IndexMetaData index(String index) {
return indices.get(index);
}
public IndexMetaData index(Index index) {
return index(index.getName());
}
public ImmutableOpenMap<String, IndexMetaData> indices() {
return this.indices;
}
public ImmutableOpenMap<String, IndexMetaData> getIndices() {
return indices();
}
public ImmutableOpenMap<String, IndexTemplateMetaData> templates() {
return this.templates;
}
public ImmutableOpenMap<String, IndexTemplateMetaData> getTemplates() {
return this.templates;
}
public ImmutableOpenMap<String, Custom> customs() {
return this.customs;
}
public ImmutableOpenMap<String, Custom> getCustoms() {
return this.customs;
}
// Unchecked cast: callers must request the type actually registered under this key.
public <T extends Custom> T custom(String type) {
return (T) customs.get(type);
}
// Sum of primary and replica shards over all indices (computed in the constructor).
public int totalNumberOfShards() {
return this.totalNumberOfShards;
}
public int getTotalNumberOfShards() {
return totalNumberOfShards();
}
// Sum of primary shards only, over all indices.
public int numberOfShards() {
return this.numberOfShards;
}
public int getNumberOfShards() {
return numberOfShards();
}
/**
* Identifies whether the array containing type names given as argument refers to all types
* The empty or null array identifies all types
*
* @param types the array containing types
* @return true if the provided array maps to all types, false otherwise
*/
public static boolean isAllTypes(String[] types) {
    // null / empty implicitly means "all types"; otherwise look for an explicit "_all".
    if (types == null || types.length == 0) {
        return true;
    }
    return isExplicitAllType(types);
}
/**
* Identifies whether the array containing type names given as argument explicitly refers to all types
* The empty or null array doesn't explicitly map to all types
*
* @param types the array containing index names
* @return true if the provided array explicitly maps to all types, false otherwise
*/
public static boolean isExplicitAllType(String[] types) {
    // Exactly one entry, and that entry is the "_all" marker.
    if (types == null || types.length != 1) {
        return false;
    }
    return ALL.equals(types[0]);
}
/**
 * @param concreteIndex The concrete index to check if routing is required
 * @param type The type to check if routing is required
 * @return Whether routing is required according to the mapping for the specified index and type
 */
public boolean routingRequired(String concreteIndex, String type) {
    IndexMetaData indexMetaData = indices.get(concreteIndex);
    if (indexMetaData == null) {
        // Unknown index: no mapping, so routing cannot be required.
        return false;
    }
    MappingMetaData mappingMetaData = indexMetaData.getMappings().get(type);
    return mappingMetaData != null && mappingMetaData.routing().required();
}
// Iterates over the metadata of all indices.
@Override
public Iterator<IndexMetaData> iterator() {
return indices.valuesIt();
}
// Compares only the parts of the metadata persisted as global cluster state:
// persistent settings, templates and GATEWAY-scoped customs. Indices, transient
// settings, version and cluster UUID are deliberately ignored.
public static boolean isGlobalStateEquals(MetaData metaData1, MetaData metaData2) {
if (!metaData1.persistentSettings.equals(metaData2.persistentSettings)) {
return false;
}
if (!metaData1.templates.equals(metaData2.templates())) {
return false;
}
// Check if any persistent metadata needs to be saved
int customCount1 = 0;
for (ObjectObjectCursor<String, Custom> cursor : metaData1.customs) {
if (customPrototypes.get(cursor.key).context().contains(XContentContext.GATEWAY)) {
if (!cursor.value.equals(metaData2.custom(cursor.key))) return false;
customCount1++;
}
}
// Count GATEWAY customs on the other side too: pairwise-equal values plus equal
// counts imply the two GATEWAY custom sets are identical (no extra keys in metaData2).
int customCount2 = 0;
for (ObjectObjectCursor<String, Custom> cursor : metaData2.customs) {
if (customPrototypes.get(cursor.key).context().contains(XContentContext.GATEWAY)) {
customCount2++;
}
}
if (customCount1 != customCount2) return false;
return true;
}
// Produces a diff from the previous state to this one (see MetaDataDiff below).
@Override
public Diff<MetaData> diff(MetaData previousState) {
return new MetaDataDiff(previousState, this);
}
// Reads a serialized diff from the stream; must mirror MetaDataDiff#writeTo.
@Override
public Diff<MetaData> readDiffFrom(StreamInput in) throws IOException {
return new MetaDataDiff(in);
}
// Parsing is delegated to the Builder; parseFieldMatcher is not used here.
@Override
public MetaData fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
return Builder.fromXContent(parser);
}
// Rendering is delegated to the Builder.
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
Builder.toXContent(this, builder, params);
return builder;
}
// Diff between two MetaData instances. Settings, version and UUID are carried as
// full values; indices, templates and customs as incremental map diffs.
// The field order in the stream constructor and writeTo must match exactly.
private static class MetaDataDiff implements Diff<MetaData> {
private long version;
private String clusterUUID;
private Settings transientSettings;
private Settings persistentSettings;
private Diff<ImmutableOpenMap<String, IndexMetaData>> indices;
private Diff<ImmutableOpenMap<String, IndexTemplateMetaData>> templates;
private Diff<ImmutableOpenMap<String, Custom>> customs;
// Builds a diff that, applied to "before", yields "after".
public MetaDataDiff(MetaData before, MetaData after) {
clusterUUID = after.clusterUUID;
version = after.version;
transientSettings = after.transientSettings;
persistentSettings = after.persistentSettings;
indices = DiffableUtils.diff(before.indices, after.indices, DiffableUtils.getStringKeySerializer());
templates = DiffableUtils.diff(before.templates, after.templates, DiffableUtils.getStringKeySerializer());
customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer());
}
// Deserializes a diff; read order mirrors writeTo below.
public MetaDataDiff(StreamInput in) throws IOException {
clusterUUID = in.readString();
version = in.readLong();
transientSettings = Settings.readSettingsFromStream(in);
persistentSettings = Settings.readSettingsFromStream(in);
indices = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexMetaData.PROTO);
templates = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), IndexTemplateMetaData.PROTO);
// Customs are resolved through the prototype registry by their type key.
customs = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(),
new DiffableUtils.DiffableValueSerializer<String, Custom>() {
@Override
public Custom read(StreamInput in, String key) throws IOException {
return lookupPrototypeSafe(key).readFrom(in);
}
@Override
public Diff<Custom> readDiff(StreamInput in, String key) throws IOException {
return lookupPrototypeSafe(key).readDiffFrom(in);
}
});
}
// Serializes the diff; write order must stay in sync with the stream constructor.
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(clusterUUID);
out.writeLong(version);
Settings.writeSettingsToStream(transientSettings, out);
Settings.writeSettingsToStream(persistentSettings, out);
indices.writeTo(out);
templates.writeTo(out);
customs.writeTo(out);
}
// Applies this diff on top of the given MetaData, producing the "after" state.
@Override
public MetaData apply(MetaData part) {
Builder builder = builder();
builder.clusterUUID(clusterUUID);
builder.version(version);
builder.transientSettings(transientSettings);
builder.persistentSettings(persistentSettings);
builder.indices(indices.apply(part.indices));
builder.templates(templates.apply(part.templates));
builder.customs(customs.apply(part.customs));
return builder.build();
}
}
// Deserializes a full MetaData from the stream.
// Read order must mirror writeTo: version, UUID, transient/persistent settings,
// indices, templates, then typed customs.
@Override
public MetaData readFrom(StreamInput in) throws IOException {
Builder builder = new Builder();
builder.version = in.readLong();
builder.clusterUUID = in.readString();
builder.transientSettings(readSettingsFromStream(in));
builder.persistentSettings(readSettingsFromStream(in));
int size = in.readVInt();
for (int i = 0; i < size; i++) {
builder.put(IndexMetaData.Builder.readFrom(in), false);
}
size = in.readVInt();
for (int i = 0; i < size; i++) {
builder.put(IndexTemplateMetaData.Builder.readFrom(in));
}
// Customs are prefixed by their type key and re-created via the prototype registry.
int customSize = in.readVInt();
for (int i = 0; i < customSize; i++) {
String type = in.readString();
Custom customIndexMetaData = lookupPrototypeSafe(type).readFrom(in);
builder.putCustom(type, customIndexMetaData);
}
return builder.build();
}
// Serializes a full MetaData; write order must stay in sync with readFrom.
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeLong(version);
out.writeString(clusterUUID);
writeSettingsToStream(transientSettings, out);
writeSettingsToStream(persistentSettings, out);
out.writeVInt(indices.size());
for (IndexMetaData indexMetaData : this) {
indexMetaData.writeTo(out);
}
out.writeVInt(templates.size());
for (ObjectCursor<IndexTemplateMetaData> cursor : templates.values()) {
cursor.value.writeTo(out);
}
// Each custom is prefixed by its type key so readFrom can resolve its prototype.
out.writeVInt(customs.size());
for (ObjectObjectCursor<String, Custom> cursor : customs) {
out.writeString(cursor.key);
cursor.value.writeTo(out);
}
}
/** Returns a new, empty {@link Builder} (clusterUUID defaults to "_na_"). */
public static Builder builder() {
return new Builder();
}
/** Returns a new {@link Builder} pre-populated with the state of {@code metaData}. */
public static Builder builder(MetaData metaData) {
return new Builder(metaData);
}
/** All known byte-sized cluster settings; used by {@code addDefaultUnitsIfNeeded}
 * to append a default "b" unit to naked numeric values. */
public static final Set<String> CLUSTER_BYTES_SIZE_SETTINGS = unmodifiableSet(newHashSet(
IndexStoreConfig.INDICES_STORE_THROTTLE_MAX_BYTES_PER_SEC_SETTING.getKey(),
RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING.getKey()));
/** All known time cluster settings; used by {@code addDefaultUnitsIfNeeded}
 * to append a default "ms" unit to naked numeric values. */
public static final Set<String> CLUSTER_TIME_SETTINGS = unmodifiableSet(newHashSet(
IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(),
RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(),
RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(),
RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.getKey(),
RecoverySettings.INDICES_RECOVERY_INTERNAL_ACTION_TIMEOUT_SETTING.getKey(),
RecoverySettings.INDICES_RECOVERY_INTERNAL_LONG_ACTION_TIMEOUT_SETTING.getKey(),
DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_REROUTE_INTERVAL_SETTING.getKey(),
InternalClusterInfoService.INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.getKey(),
InternalClusterInfoService.INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.getKey(),
DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(),
InternalClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING.getKey()));
/** As of 2.0 we require units for time and byte-sized settings. This methods adds default units to any cluster settings that don't
 * specify a unit.
 * <p>
 * Only persistent settings are migrated here; a warning is logged for each
 * converted value. Returns the input unchanged when nothing needed fixing.
 * NOTE(review): transient settings are NOT migrated by this method — confirm
 * whether that is handled elsewhere or intentional. */
public static MetaData addDefaultUnitsIfNeeded(ESLogger logger, MetaData metaData) {
// Lazily created: stays null unless at least one setting needs a unit appended.
Settings.Builder newPersistentSettings = null;
for(Map.Entry<String,String> ent : metaData.persistentSettings().getAsMap().entrySet()) {
String settingName = ent.getKey();
String settingValue = ent.getValue();
if (CLUSTER_BYTES_SIZE_SETTINGS.contains(settingName)) {
try {
// Parse succeeds only for a bare number (no unit suffix such as "mb").
Long.parseLong(settingValue);
} catch (NumberFormatException nfe) {
// Value already carries a unit (or is otherwise non-numeric) — leave it alone.
continue;
}
// It's a naked number that previously would be interpreted as default unit (bytes); now we add it:
logger.warn("byte-sized cluster setting [{}] with value [{}] is missing units; assuming default units (b) but in future versions this will be a hard error", settingName, settingValue);
if (newPersistentSettings == null) {
// First fix-up: seed the builder with all existing persistent settings.
newPersistentSettings = Settings.builder();
newPersistentSettings.put(metaData.persistentSettings());
}
newPersistentSettings.put(settingName, settingValue + "b");
}
// The two setting-name sets are separate; a name hits at most one branch.
if (CLUSTER_TIME_SETTINGS.contains(settingName)) {
try {
Long.parseLong(settingValue);
} catch (NumberFormatException nfe) {
continue;
}
// It's a naked number that previously would be interpreted as default unit (ms); now we add it:
logger.warn("time cluster setting [{}] with value [{}] is missing units; assuming default units (ms) but in future versions this will be a hard error", settingName, settingValue);
if (newPersistentSettings == null) {
newPersistentSettings = Settings.builder();
newPersistentSettings.put(metaData.persistentSettings());
}
newPersistentSettings.put(settingName, settingValue + "ms");
}
}
if (newPersistentSettings != null) {
// Rebuild MetaData with the fixed persistent settings; every other field
// (including the precomputed index arrays and alias lookup) is reused as-is.
return new MetaData(
metaData.clusterUUID(),
metaData.version(),
metaData.transientSettings(),
newPersistentSettings.build(),
metaData.getIndices(),
metaData.getTemplates(),
metaData.getCustoms(),
metaData.concreteAllIndices(),
metaData.concreteAllOpenIndices(),
metaData.concreteAllClosedIndices(),
metaData.getAliasAndIndexLookup());
} else {
// No changes:
return metaData;
}
}
/**
 * Fluent builder for {@link MetaData}. Accumulates cluster-level settings plus
 * the index, template and custom maps; {@link #build()} then derives the
 * concrete-index arrays and the alias/index lookup map and produces an
 * immutable MetaData. Also hosts the static XContent (de)serialization helpers.
 */
public static class Builder {
// Defaults to "_na_" until generateClusterUuidIfNeeded() assigns a real UUID.
private String clusterUUID;
private long version;
private Settings transientSettings = Settings.Builder.EMPTY_SETTINGS;
private Settings persistentSettings = Settings.Builder.EMPTY_SETTINGS;
private final ImmutableOpenMap.Builder<String, IndexMetaData> indices;
private final ImmutableOpenMap.Builder<String, IndexTemplateMetaData> templates;
private final ImmutableOpenMap.Builder<String, Custom> customs;
/** Creates an empty builder with no indices, templates or customs. */
public Builder() {
clusterUUID = "_na_";
indices = ImmutableOpenMap.builder();
templates = ImmutableOpenMap.builder();
customs = ImmutableOpenMap.builder();
}
/** Creates a builder seeded from an existing MetaData instance. */
public Builder(MetaData metaData) {
this.clusterUUID = metaData.clusterUUID;
this.transientSettings = metaData.transientSettings;
this.persistentSettings = metaData.persistentSettings;
this.version = metaData.version;
this.indices = ImmutableOpenMap.builder(metaData.indices);
this.templates = ImmutableOpenMap.builder(metaData.templates);
this.customs = ImmutableOpenMap.builder(metaData.customs);
}
/** Adds (or replaces) an index, always bumping its version by one. */
public Builder put(IndexMetaData.Builder indexMetaDataBuilder) {
// we know its a new one, increment the version and store
indexMetaDataBuilder.version(indexMetaDataBuilder.version() + 1);
IndexMetaData indexMetaData = indexMetaDataBuilder.build();
indices.put(indexMetaData.getIndex().getName(), indexMetaData);
return this;
}
/** Adds (or replaces) an index, optionally bumping its version. A no-op when
 * the exact same instance is already stored under that name. */
public Builder put(IndexMetaData indexMetaData, boolean incrementVersion) {
if (indices.get(indexMetaData.getIndex().getName()) == indexMetaData) {
return this;
}
// if we put a new index metadata, increment its version
if (incrementVersion) {
indexMetaData = IndexMetaData.builder(indexMetaData).version(indexMetaData.getVersion() + 1).build();
}
indices.put(indexMetaData.getIndex().getName(), indexMetaData);
return this;
}
/** Returns the index metadata for {@code index}, or null if absent. */
public IndexMetaData get(String index) {
return indices.get(index);
}
/** Removes the named index (no-op if absent). */
public Builder remove(String index) {
indices.remove(index);
return this;
}
/** Removes every index from this builder. */
public Builder removeAllIndices() {
indices.clear();
return this;
}
/** Adds all entries of the given index map (existing names are overwritten). */
public Builder indices(ImmutableOpenMap<String, IndexMetaData> indices) {
this.indices.putAll(indices);
return this;
}
/** Builds and stores the given template. */
public Builder put(IndexTemplateMetaData.Builder template) {
return put(template.build());
}
/** Stores a template keyed by its name. */
public Builder put(IndexTemplateMetaData template) {
templates.put(template.name(), template);
return this;
}
/** Removes the named template (no-op if absent). */
public Builder removeTemplate(String templateName) {
templates.remove(templateName);
return this;
}
/** Adds all entries of the given template map. */
public Builder templates(ImmutableOpenMap<String, IndexTemplateMetaData> templates) {
this.templates.putAll(templates);
return this;
}
/** Returns the custom metadata registered under {@code type}, or null. */
public Custom getCustom(String type) {
return customs.get(type);
}
/** Registers custom metadata under {@code type}. */
public Builder putCustom(String type, Custom custom) {
customs.put(type, custom);
return this;
}
/** Removes the custom metadata registered under {@code type}. */
public Builder removeCustom(String type) {
customs.remove(type);
return this;
}
/** Adds all entries of the given custom map. */
public Builder customs(ImmutableOpenMap<String, Custom> customs) {
this.customs.putAll(customs);
return this;
}
/** Merges {@code settings} into the named indices (all indices when none are
 * given), bumping each touched index's version via put().
 * @throws IndexNotFoundException if a named index does not exist */
public Builder updateSettings(Settings settings, String... indices) {
if (indices == null || indices.length == 0) {
indices = this.indices.keys().toArray(String.class);
}
for (String index : indices) {
IndexMetaData indexMetaData = this.indices.get(index);
if (indexMetaData == null) {
throw new IndexNotFoundException(index);
}
// New settings win over the index's existing settings on key collisions.
put(IndexMetaData.builder(indexMetaData)
.settings(settingsBuilder().put(indexMetaData.getSettings()).put(settings)));
}
return this;
}
/** Sets the replica count on the named indices (all indices when none are given).
 * @throws IndexNotFoundException if a named index does not exist */
public Builder updateNumberOfReplicas(int numberOfReplicas, String... indices) {
if (indices == null || indices.length == 0) {
indices = this.indices.keys().toArray(String.class);
}
for (String index : indices) {
IndexMetaData indexMetaData = this.indices.get(index);
if (indexMetaData == null) {
throw new IndexNotFoundException(index);
}
put(IndexMetaData.builder(indexMetaData).numberOfReplicas(numberOfReplicas));
}
return this;
}
/** Returns the current transient settings. */
public Settings transientSettings() {
return this.transientSettings;
}
/** Replaces the transient settings wholesale. */
public Builder transientSettings(Settings settings) {
this.transientSettings = settings;
return this;
}
/** Returns the current persistent settings. */
public Settings persistentSettings() {
return this.persistentSettings;
}
/** Replaces the persistent settings wholesale. */
public Builder persistentSettings(Settings settings) {
this.persistentSettings = settings;
return this;
}
/** Sets the metadata version. */
public Builder version(long version) {
this.version = version;
return this;
}
/** Sets the cluster UUID. */
public Builder clusterUUID(String clusterUUID) {
this.clusterUUID = clusterUUID;
return this;
}
/** Assigns a random cluster UUID if one has not been set yet ("_na_" sentinel). */
public Builder generateClusterUuidIfNeeded() {
if (clusterUUID.equals("_na_")) {
clusterUUID = Strings.randomBase64UUID();
}
return this;
}
/** Materializes an immutable MetaData, computing the derived structures:
 * the all/open/closed concrete-index arrays and the alias-and-index lookup map.
 * @throws IllegalStateException if an alias shares a name with an index */
public MetaData build() {
// TODO: We should move these datastructures to IndexNameExpressionResolver, this will give the following benefits:
// 1) The datastructures will only be rebuilt when needed. Now during serializing we rebuild these datastructures
// while these datastructures aren't even used.
// 2) The aliasAndIndexLookup can be updated instead of rebuilding it all the time.
// build all concrete indices arrays:
// TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices.
// When doing an operation across all indices, most of the time is spent on actually going to all shards and
// do the required operations, the bottleneck isn't resolving expressions into concrete indices.
List<String> allIndicesLst = new ArrayList<>();
for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
allIndicesLst.add(cursor.value.getIndex().getName());
}
String[] allIndices = allIndicesLst.toArray(new String[allIndicesLst.size()]);
List<String> allOpenIndicesLst = new ArrayList<>();
List<String> allClosedIndicesLst = new ArrayList<>();
for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
IndexMetaData indexMetaData = cursor.value;
if (indexMetaData.getState() == IndexMetaData.State.OPEN) {
allOpenIndicesLst.add(indexMetaData.getIndex().getName());
} else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
allClosedIndicesLst.add(indexMetaData.getIndex().getName());
}
}
String[] allOpenIndices = allOpenIndicesLst.toArray(new String[allOpenIndicesLst.size()]);
String[] allClosedIndices = allClosedIndicesLst.toArray(new String[allClosedIndicesLst.size()]);
// build all indices map
SortedMap<String, AliasOrIndex> aliasAndIndexLookup = new TreeMap<>();
for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
IndexMetaData indexMetaData = cursor.value;
aliasAndIndexLookup.put(indexMetaData.getIndex().getName(), new AliasOrIndex.Index(indexMetaData));
for (ObjectObjectCursor<String, AliasMetaData> aliasCursor : indexMetaData.getAliases()) {
AliasMetaData aliasMetaData = aliasCursor.value;
AliasOrIndex aliasOrIndex = aliasAndIndexLookup.get(aliasMetaData.getAlias());
if (aliasOrIndex == null) {
// First index carrying this alias: create the alias entry.
aliasOrIndex = new AliasOrIndex.Alias(aliasMetaData, indexMetaData);
aliasAndIndexLookup.put(aliasMetaData.getAlias(), aliasOrIndex);
} else if (aliasOrIndex instanceof AliasOrIndex.Alias) {
// Alias already exists: add this index to it.
AliasOrIndex.Alias alias = (AliasOrIndex.Alias) aliasOrIndex;
alias.addIndex(indexMetaData);
} else if (aliasOrIndex instanceof AliasOrIndex.Index) {
// The alias name collides with a concrete index name — not allowed.
AliasOrIndex.Index index = (AliasOrIndex.Index) aliasOrIndex;
throw new IllegalStateException("index and alias names need to be unique, but alias [" + aliasMetaData.getAlias() + "] and index " + index.getIndex().getIndex() + " have the same name");
} else {
throw new IllegalStateException("unexpected alias [" + aliasMetaData.getAlias() + "][" + aliasOrIndex + "]");
}
}
}
aliasAndIndexLookup = Collections.unmodifiableSortedMap(aliasAndIndexLookup);
return new MetaData(clusterUUID, version, transientSettings, persistentSettings, indices.build(), templates.build(), customs.build(), allIndices, allOpenIndices, allClosedIndices, aliasAndIndexLookup);
}
/** Serializes {@code metaData} to a standalone JSON string. */
public static String toXContent(MetaData metaData) throws IOException {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject();
toXContent(metaData, builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
return builder.string();
}
/** Writes {@code metaData} as a "meta-data" object into an existing builder.
 * Transient settings and index bodies are only emitted in API context;
 * customs are emitted only when their declared context matches. */
public static void toXContent(MetaData metaData, XContentBuilder builder, ToXContent.Params params) throws IOException {
XContentContext context = XContentContext.valueOf(params.param(CONTEXT_MODE_PARAM, "API"));
builder.startObject("meta-data");
builder.field("version", metaData.version());
builder.field("cluster_uuid", metaData.clusterUUID);
if (!metaData.persistentSettings().getAsMap().isEmpty()) {
builder.startObject("settings");
for (Map.Entry<String, String> entry : metaData.persistentSettings().getAsMap().entrySet()) {
builder.field(entry.getKey(), entry.getValue());
}
builder.endObject();
}
if (context == XContentContext.API && !metaData.transientSettings().getAsMap().isEmpty()) {
builder.startObject("transient_settings");
for (Map.Entry<String, String> entry : metaData.transientSettings().getAsMap().entrySet()) {
builder.field(entry.getKey(), entry.getValue());
}
builder.endObject();
}
builder.startObject("templates");
for (ObjectCursor<IndexTemplateMetaData> cursor : metaData.templates().values()) {
IndexTemplateMetaData.Builder.toXContent(cursor.value, builder, params);
}
builder.endObject();
if (context == XContentContext.API && !metaData.indices().isEmpty()) {
builder.startObject("indices");
for (IndexMetaData indexMetaData : metaData) {
IndexMetaData.Builder.toXContent(indexMetaData, builder, params);
}
builder.endObject();
}
for (ObjectObjectCursor<String, Custom> cursor : metaData.customs()) {
Custom proto = lookupPrototypeSafe(cursor.key);
if (proto.context().contains(context)) {
builder.startObject(cursor.key);
cursor.value.toXContent(builder, params);
builder.endObject();
}
}
builder.endObject();
}
/** Parses a MetaData from XContent. Tolerates starting either on the
 * "meta-data" field or on a fresh parser; unknown custom sections without a
 * registered prototype are skipped.
 * @throws IllegalArgumentException on unexpected structure or fields */
public static MetaData fromXContent(XContentParser parser) throws IOException {
Builder builder = new Builder();
// we might get here after the meta-data element, or on a fresh parser
XContentParser.Token token = parser.currentToken();
String currentFieldName = parser.currentName();
if (!"meta-data".equals(currentFieldName)) {
token = parser.nextToken();
if (token == XContentParser.Token.START_OBJECT) {
// move to the field name (meta-data)
token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new IllegalArgumentException("Expected a field name but got " + token);
}
// move to the next object
token = parser.nextToken();
}
currentFieldName = parser.currentName();
}
if (!"meta-data".equals(parser.currentName())) {
throw new IllegalArgumentException("Expected [meta-data] as a field name but got " + currentFieldName);
}
if (token != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("Expected a START_OBJECT but got " + token);
}
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if ("settings".equals(currentFieldName)) {
// Note: the "settings" section maps to PERSISTENT settings.
builder.persistentSettings(Settings.settingsBuilder().put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())).build());
} else if ("indices".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
builder.put(IndexMetaData.Builder.fromXContent(parser), false);
}
} else if ("templates".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
builder.put(IndexTemplateMetaData.Builder.fromXContent(parser, parser.currentName()));
}
} else {
// check if its a custom index metadata
Custom proto = lookupPrototype(currentFieldName);
if (proto == null) {
//TODO warn
parser.skipChildren();
} else {
Custom custom = proto.fromXContent(parser);
builder.putCustom(custom.type(), custom);
}
}
} else if (token.isValue()) {
if ("version".equals(currentFieldName)) {
builder.version = parser.longValue();
} else if ("cluster_uuid".equals(currentFieldName) || "uuid".equals(currentFieldName)) {
// "uuid" is accepted as a legacy spelling of "cluster_uuid".
builder.clusterUUID = parser.text();
} else {
throw new IllegalArgumentException("Unexpected field [" + currentFieldName + "]");
}
} else {
throw new IllegalArgumentException("Unexpected token " + token);
}
}
return builder.build();
}
/** Deserializes a MetaData from a stream via the shared PROTO instance. */
public static MetaData readFrom(StreamInput in) throws IOException {
return PROTO.readFrom(in);
}
}
}
| |
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package rx.internal.operators;
import java.util.ArrayList;
import java.util.List;
import rx.Observable;
import rx.Observable.Operator;
import rx.Subscriber;
import rx.observers.SerializedSubscriber;
import rx.subscriptions.SerialSubscription;
/**
* Transforms an Observable that emits Observables into a single Observable that
* emits the items emitted by the most recently published of those Observables.
* <p>
* <img width="640" src="https://github.com/Netflix/RxJava/wiki/images/rx-operators/switchDo.png" alt="">
*
* @param <T> the value type
*/
public final class OperatorSwitch<T> implements Operator<T, Observable<? extends T>> {
/**
 * Subscribes to the outer Observable of Observables; each inner Observable
 * received via onNext replaces the previous one (the SerialSubscription
 * unsubscribes the old inner subscriber when a new one is set).
 * <p>
 * Concurrency protocol: every inner subscription is tagged with an
 * incrementing {@code index}; emissions carrying a stale id are dropped.
 * The {@code emitting}/{@code queue} pair implements an emitter loop so that
 * only one thread delivers to the downstream subscriber at a time, with
 * concurrent events parked in {@code queue} as NotificationLite objects.
 */
@Override
public Subscriber<? super Observable<? extends T>> call(final Subscriber<? super T> child) {
// Serialize terminal/next events toward the child.
final SerializedSubscriber<T> s = new SerializedSubscriber<T>(child);
// Holds the current inner subscription; set() unsubscribes the previous one.
final SerialSubscription ssub = new SerialSubscription();
child.add(ssub);
return new Subscriber<Observable<? extends T>>(child) {
final Object guard = new Object();
final NotificationLite<?> nl = NotificationLite.instance();
/** Guarded by guard. */
int index;
/** Guarded by guard. */
boolean active;
/** Guarded by guard. */
boolean mainDone;
/** Guarded by guard. */
List<Object> queue;
/** Guarded by guard. */
boolean emitting;
/** A new inner Observable arrived: bump the id and switch to it. */
@Override
public void onNext(Observable<? extends T> t) {
final int id;
synchronized (guard) {
id = ++index;
active = true;
}
// Inner subscriber forwards events tagged with its id so stale
// inners (replaced before delivering) are ignored.
Subscriber<T> sub = new Subscriber<T>() {
@Override
public void onNext(T t) {
emit(t, id);
}
@Override
public void onError(Throwable e) {
error(e, id);
}
@Override
public void onCompleted() {
complete(id);
}
};
ssub.set(sub);
t.unsafeSubscribe(sub);
}
@Override
public void onError(Throwable e) {
s.onError(e);
unsubscribe();
}
/** Outer completed: finish now unless an inner is still active, in which
 * case completion is deferred to complete(id). */
@Override
public void onCompleted() {
List<Object> localQueue;
synchronized (guard) {
mainDone = true;
if (active) {
return;
}
if (emitting) {
// Another thread owns the emitter loop: enqueue the terminal event.
if (queue == null) {
queue = new ArrayList<Object>();
}
queue.add(nl.completed());
return;
}
localQueue = queue;
queue = null;
emitting = true;
}
drain(localQueue);
s.onCompleted();
unsubscribe();
}
/** Delivers a value from the inner with the given id, or queues it if
 * another thread is emitting; drops it if the id is stale. */
void emit(T value, int id) {
List<Object> localQueue;
synchronized (guard) {
if (id != index) {
return;
}
if (emitting) {
if (queue == null) {
queue = new ArrayList<Object>();
}
queue.add(value);
return;
}
localQueue = queue;
queue = null;
emitting = true;
}
// Emitter loop: drain queued events, emit our own value once, then keep
// draining anything queued meanwhile until the queue stays empty.
boolean once = true;
boolean skipFinal = false;
try {
do {
drain(localQueue);
if (once) {
once = false;
s.onNext(value);
}
synchronized (guard) {
localQueue = queue;
queue = null;
if (localQueue == null) {
emitting = false;
skipFinal = true;
break;
}
}
} while (!s.isUnsubscribed());
} finally {
// If we exited abnormally (or via unsubscribe), release the emitter flag.
if (!skipFinal) {
synchronized (guard) {
emitting = false;
}
}
}
}
/** Replays queued NotificationLite events to the downstream subscriber;
 * stops at the first terminal event. */
void drain(List<Object> localQueue) {
if (localQueue == null) {
return;
}
for (Object o : localQueue) {
if (nl.isCompleted(o)) {
s.onCompleted();
break;
} else
if (nl.isError(o)) {
s.onError(nl.getError(o));
break;
} else {
@SuppressWarnings("unchecked")
T t = (T)o;
s.onNext(t);
}
}
}
/** Inner errored: forward it (or queue it) unless the id is stale. */
void error(Throwable e, int id) {
List<Object> localQueue;
synchronized (guard) {
if (id != index) {
return;
}
if (emitting) {
if (queue == null) {
queue = new ArrayList<Object>();
}
queue.add(nl.error(e));
return;
}
localQueue = queue;
queue = null;
emitting = true;
}
drain(localQueue);
s.onError(e);
unsubscribe();
}
/** Inner completed: only terminates downstream when the outer is also done;
 * otherwise just marks no inner as active. */
void complete(int id) {
List<Object> localQueue;
synchronized (guard) {
if (id != index) {
return;
}
active = false;
if (!mainDone) {
return;
}
if (emitting) {
if (queue == null) {
queue = new ArrayList<Object>();
}
queue.add(nl.completed());
return;
}
localQueue = queue;
queue = null;
emitting = true;
}
drain(localQueue);
s.onCompleted();
unsubscribe();
}
};
}
}
| |
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.plugin.addrbook.msoutlook.calendar;
import java.nio.*;
import java.text.*;
import java.util.*;
import net.java.sip.communicator.service.calendar.*;
/**
* The class represents the recurring pattern structure of calendar item.
*
* @author Hristo Terezov
*/
public class RecurringPattern
{
/**
* Enum for the type of the pattern.
*/
public enum PatternType
{
/**
 * Daily recurrence (0x0000).
 */
Day((short)0x0000),
/**
 * Weekly recurrence (0x0001).
 */
Week((short)0x0001),
/**
 * Monthly recurrence on a fixed day of the month (0x0002).
 */
Month((short)0x0002),
/**
 * Monthly recurrence on the Nth occurrence of a weekday, e.g. the second
 * Tuesday (0x0003).
 */
MonthNth((short)0x0003),
/**
 * Monthly recurrence on the last day of the month (0x0004).
 */
MonthEnd((short)0x004),
/**
 * Monthly recurrence on the Hijri (Islamic lunar) calendar (0x000A).
 */
HjMonth((short)0x000A),
/**
 * Nth-weekday monthly recurrence on the Hijri calendar (0x000B).
 */
HjMonthNth((short)0x000B),
/**
 * End-of-month recurrence on the Hijri calendar (0x000C).
 */
HjMonthEnd((short)0x000C);
/**
 * The wire value of this type as it appears in the recurrence blob.
 */
private final short value;
/**
 * Constructs new <tt>PatternType</tt> instance.
 * @param value the value.
 */
PatternType(short value)
{
this.value = value;
}
/**
 * Returns the value of the <tt>PatternType</tt> instance.
 * @return the value
 */
public short getValue()
{
return value;
}
/**
 * Finds the <tt>PatternType</tt> by given value.
 * @param value the value
 * @return the found <tt>PatternType</tt> instance or null if no type is
 * found.
 */
public static PatternType getFromShort(short value)
{
for(PatternType type : values())
{
if(type.getValue() == value)
return type;
}
return null;
}
}
/**
 * Recurrence frequency code parsed from the blob.
 */
private short recurFrequency;
/**
 * The pattern type (daily/weekly/monthly variants), decoded from the blob.
 */
private PatternType patternType;
/**
 * Calendar type code parsed from the blob.
 */
private short calendarType;
/**
 * First date/time value parsed from the blob (raw, unconverted).
 */
private int firstDateTime;
/**
 * Recurrence period; the Day branch of next() treats it as minutes and the
 * Week branch as a number of weeks.
 */
private int period;
/**
 * Sliding-flag value parsed from the blob (stored, not otherwise used here).
 */
private int slidingFlag;
/**
 * First pattern-specific value; for Week/MonthNth patterns it is a
 * day-of-week bitmask (see weekOfDayMask).
 */
private int patternSpecific1;
/**
 * Second pattern-specific value (only present for the *Nth pattern types).
 */
private int patternSpecific2;
/**
 * End-type code; dateOutOfRange() treats 0x00002023 and 0xFFFFFFFF as
 * "never ends".
 */
private int endType;
/**
 * Number of occurrences parsed from the blob.
 */
private int occurenceCount;
/**
 * First day of week code; the Week branch of next() maps it to the Calendar
 * day-of-week enum by adding 1.
 */
private int firstDow;
/**
 * Number of deleted instances in the blob.
 */
private int deletedInstanceCount;
/**
 * Number of modified instances in the blob.
 */
private int modifiedInstanceCount;
/**
 * Series start date in minutes since 1/1/1601 (converted via
 * windowsTimeToDateObject when needed).
 */
private int startDate;
/**
 * Series end date in minutes since 1/1/1601.
 */
private int endDate;
/**
 * List with the start dates of deleted instances (already converted to Date).
 */
private List<Date> deletedInstances = new ArrayList<Date>();
/**
 * Raw start dates (minutes since 1/1/1601) of modified instances.
 */
private int[] modifiedInstances;
/**
 * List of exception info structures included in the pattern.
 */
private List<ExceptionInfo> exceptionInfo;
/**
 * The source calendar item of the recurrent series.
 */
private CalendarItemTimerTask sourceTask;
/**
 * Calendar day-of-week values (1-based) on which the item may occur.
 */
private List<Integer> allowedDaysOfWeek = new LinkedList<Integer>();
/**
 * The binary data of the pattern, little-endian wrapped.
 */
private ByteBuffer dataBuffer;
/**
 * Bitmasks for the seven days of the week, index 0 = first mask bit.
 */
public static int[] weekOfDayMask
= {0x00000001, 0x00000002, 0x00000004, 0x00000008, 0x00000010,
0x00000020, 0x00000040};
/**
 * Parses the binary recurrence-pattern blob (little-endian, fixed field
 * layout) and schedules timer tasks for any exception (modified) instances
 * that are still in the future.
 * @param data the binary data.
 * @param sourceTask the calendar item.
 * @throws IndexOutOfBoundsException if data can't be parsed.
 */
public RecurringPattern(byte[] data, CalendarItemTimerTask sourceTask)
throws IndexOutOfBoundsException
{
this.sourceTask = sourceTask;
dataBuffer = ByteBuffer.wrap(data).order(ByteOrder.LITTLE_ENDIAN);
// First 4 bytes of the blob are skipped (header before recurFrequency).
int offset = 4;
recurFrequency = dataBuffer.getShort(offset);
offset += 2;
patternType = PatternType.getFromShort(dataBuffer.getShort(offset));
offset += 2;
calendarType = dataBuffer.getShort(offset);
offset += 2;
firstDateTime = dataBuffer.getInt(offset);
offset += 4;
period = dataBuffer.getInt(offset);
offset += 4;
slidingFlag = dataBuffer.getInt(offset);
offset += 4;
// Pattern-specific fields: their presence and size depend on patternType.
switch(patternType)
{
case Week:
case Month:
case MonthEnd:
case HjMonth:
case HjMonthEnd:
patternSpecific1 = dataBuffer.getInt(offset);
patternSpecific2 = 0;
offset +=4;
if(patternType == PatternType.Week)
{
// NOTE(review): firstDow is used here but is only read from the buffer
// further below, so at this point it still holds its default value 0.
// All 7 days are still covered (day % 7), but the insertion ORDER of
// allowedDaysOfWeek always starts from Sunday — confirm this is intended.
for(int day = firstDow; day < firstDow + 7; day++)
{
if((patternSpecific1 & (weekOfDayMask[day%7])) != 0)
allowedDaysOfWeek.add((day%7) + 1);
}
}
break;
case MonthNth:
case HjMonthNth:
patternSpecific1 = dataBuffer.getInt(offset);
patternSpecific2 = dataBuffer.getInt(offset + 4);
// A mask of all 7 days that is not "last occurrence" degenerates to a
// plain monthly pattern.
if(patternSpecific1 == 0x7f && patternSpecific2 != 0x5)
{
patternType = PatternType.Month;
}
for(int day = 0; day < 7; day++)
{
if((patternSpecific1 & (weekOfDayMask[day])) != 0)
allowedDaysOfWeek.add((day) + 1);
}
offset +=8;
break;
default:
break;
}
//endType
endType = dataBuffer.getInt(offset);
offset += 4;
occurenceCount = dataBuffer.getInt(offset);
offset += 4;
firstDow = dataBuffer.getInt(offset);
offset += 4;
deletedInstanceCount = dataBuffer.getInt(offset);
offset += 4;
//deleted instances
for(int i = 0; i < deletedInstanceCount; i ++)
{
deletedInstances.add(
windowsTimeToDateObject(dataBuffer.getInt(offset)));
offset += 4;
}
modifiedInstanceCount = dataBuffer.getInt(offset);
offset += 4;
//modified instances
modifiedInstances = new int[modifiedInstanceCount];
for(int i = 0; i < modifiedInstanceCount; i ++)
{
modifiedInstances[i] = dataBuffer.getInt(offset);
offset += 4;
}
startDate = dataBuffer.getInt(offset);
offset += 4;
endDate = dataBuffer.getInt(offset);
offset += 4;
// Skip 16 bytes of fields we do not use before the exception count.
offset += 16;
short exceptionCount = dataBuffer.getShort(offset);
offset += 2;
exceptionInfo = new ArrayList<ExceptionInfo>(exceptionCount);
for(int i = 0; i < exceptionCount;i++)
{
ExceptionInfo tmpExceptionInfo = new ExceptionInfo(offset);
exceptionInfo.add(tmpExceptionInfo);
offset += tmpExceptionInfo.sizeInBytes();
CalendarService.BusyStatusEnum status
= tmpExceptionInfo.getBusyStatus();
Date startTime = tmpExceptionInfo.getStartDate();
Date endTime = tmpExceptionInfo.getEndDate();
if(status == CalendarService.BusyStatusEnum.FREE
|| startTime == null || endTime == null)
continue;
Date currentTime = new Date();
// NOTE(review): this `return` exits the constructor entirely on the first
// already-ended exception, leaving any remaining exceptions unparsed and
// unscheduled — confirm whether `continue` was intended.
if(endTime.before(currentTime) || endTime.equals(currentTime))
return;
boolean executeNow = false;
if(startTime.before(currentTime) || startTime.equals(currentTime))
executeNow = true;
CalendarItemTimerTask task = new CalendarItemTimerTask(status,
startTime, endTime, sourceTask.getId(), executeNow, this);
task.scheduleTasks();
}
}
/**
 * Converts a Windows timestamp (minutes elapsed since 1/1/1601) into a
 * {@link Date}, shifted by the default time zone's offset.
 * @param time the number of minutes since 1/1/1601
 * @return the corresponding <tt>Date</tt> object
 */
public static Date windowsTimeToDateObject(long time) {
    // Milliseconds between 1/1/1601 and the Unix epoch (Date.parse("1/1/1601")).
    final long WINDOWS_TO_UNIX_EPOCH_MS = 11644473600000L;
    long millisSinceEpoch = time * 60000L - WINDOWS_TO_UNIX_EPOCH_MS;
    // Shift by the local zone offset at that instant.
    millisSinceEpoch -= TimeZone.getDefault().getOffset(millisSinceEpoch);
    return new Date(millisSinceEpoch);
}
/**
 * Renders all parsed pattern fields for debugging purposes. The output format
 * (field labels, hex formatting, and the "Exeptions" header typo) is kept
 * byte-identical to the historical output; only the construction was switched
 * from repeated String concatenation in loops to a StringBuilder.
 * @return a multi-line human-readable dump of this pattern
 */
@Override
public String toString()
{
    StringBuilder result = new StringBuilder();
    result.append("recurFrequency: ")
        .append(String.format("%#02x", this.recurFrequency)).append("\n");
    result.append("patternType: ")
        .append(String.format("%#02x", this.patternType.getValue())).append("\n");
    result.append("calendarType: ")
        .append(String.format("%#02x", this.calendarType)).append("\n");
    result.append("endType: ").append(String.format("%#04x", this.endType)).append("\n");
    result.append("period: ").append(this.period).append("\n");
    result.append("occurenceCount: ")
        .append(String.format("%#04x", this.occurenceCount)).append("\n");
    result.append("patternSpecific1: ")
        .append(String.format("%#04x", this.patternSpecific1)).append("\n");
    result.append("patternSpecific2: ")
        .append(String.format("%#04x", this.patternSpecific2)).append("\n");
    result.append("startDate hex: ")
        .append(String.format("%#04x", this.startDate)).append("\n");
    result.append("endDate hex: ")
        .append(String.format("%#04x", this.endDate)).append("\n");
    result.append("startDate: ")
        .append(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(
            windowsTimeToDateObject(this.startDate))).append("\n");
    result.append("endDate: ")
        .append(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(
            windowsTimeToDateObject(this.endDate))).append("\n");
    for(int i = 0; i < modifiedInstanceCount; i++)
    {
        result.append("modified Instance date hex: ")
            .append(String.format("%#04x", this.modifiedInstances[i])).append("\n");
        result.append("modified Instance date: ")
            .append(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z").format(
                windowsTimeToDateObject(this.modifiedInstances[i]))).append("\n");
    }
    for(int i = 0; i < deletedInstanceCount; i++)
    {
        result.append("deleted Instance date: ")
            .append(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z").format(
                deletedInstances.get(i))).append("\n");
    }
    // Duplicated patternSpecific2 line is preserved from the original output.
    result.append("patternSpecific2: ")
        .append(String.format("%#04x", this.patternSpecific2)).append("\n");
    result.append("\n\n =====================Exeptions====================\n\n");
    for(ExceptionInfo info : exceptionInfo)
    {
        result.append(info.toString()).append("\n\n");
    }
    return result.toString();
}
/**
 * Checks whether the given date falls AFTER the end of the recurrent series
 * (i.e. the series is already finished at that date). Note the inverted
 * sense relative to the method name reading "in range": it returns true when
 * the date is OUT of the pattern's range.
 * @param date the date to check (time-of-day is zeroed before comparing)
 * @return <tt>true</tt> if the series has an end date and the (midnight of
 * the) given date is after it; <tt>false</tt> for never-ending series
 * (endType 0x00002023 or 0xFFFFFFFF, the latter comparing as int -1) or
 * dates on/before the end date.
 */
private boolean dateOutOfRange(Date date)
{
Calendar cal = Calendar.getInstance();
cal.setTime(date);
// Normalize to midnight so the comparison is date-granular.
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE,0);
cal.set(Calendar.SECOND,0);
if((endType != 0x00002023) && (endType != 0xFFFFFFFF)
&& cal.getTime().after(windowsTimeToDateObject(this.endDate)))
{
return true;// the series are finished
}
return false;
}
/**
* Calculates and creates the next calendar item.
* @param previousStartDate the start date of the previous occurrence.
* @param previousEndDate the end date of the previous occurrence.
* @return the new calendar item or null if there are no more calendar items
* from that recurrent series.
*/
public CalendarItemTimerTask next(Date previousStartDate,
Date previousEndDate)
{
if(dateOutOfRange(new Date()))
{
return null;
}
Date startDate = previousStartDate;
Date endDate = null;
boolean executeNow = false;
long duration = sourceTask.getEndDate().getTime()
- sourceTask.getStartDate().getTime();
switch(patternType)
{
case Day:
{
startDate
= new Date(startDate.getTime() + period * 60000);
endDate = new Date(
previousEndDate.getTime() + period * 60000);
Date currentDate = new Date();
if(endDate.before(currentDate))
{
long offset
= currentDate.getTime() - endDate.getTime();
offset -= offset % (period * 60000);
if(endDate.getTime() + offset < currentDate.getTime())
{
offset += period * 60000;
}
startDate = new Date(startDate.getTime() + offset);
}
Calendar cal = Calendar.getInstance();
cal.setTime(startDate);
Calendar cal2 = (Calendar) cal.clone();
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
while(deletedInstances.contains(cal.getTime()))
{
cal.add(Calendar.MINUTE, period);
cal2.add(Calendar.MINUTE, period);
}
if(dateOutOfRange(cal.getTime()))
{
return null;
}
startDate = cal2.getTime();
endDate = new Date(startDate.getTime() + duration);
if(startDate.before(currentDate))
{
executeNow = true;
}
return new CalendarItemTimerTask(
sourceTask.getStatus(),
startDate, endDate, sourceTask.getId(), executeNow, this);
}
case Week:
{
Calendar cal = Calendar.getInstance();
/**
* The enum for the firstDow field is the same as Calendar day of
* week enum + 1 day
*/
cal.setFirstDayOfWeek(firstDow + 1);
cal.setTime(startDate);
int dayOfWeek = cal.get(Calendar.DAY_OF_WEEK);
int index = allowedDaysOfWeek.indexOf(dayOfWeek);
if(++index < allowedDaysOfWeek.size())
{
cal.set(Calendar.DAY_OF_WEEK, allowedDaysOfWeek.get(index));
startDate = cal.getTime();
endDate = new Date(startDate.getTime() + duration);
}
else
{
cal.set(Calendar.DAY_OF_WEEK, allowedDaysOfWeek.get(0));
cal.add(Calendar.WEEK_OF_YEAR, period);
startDate = cal.getTime();
endDate = new Date(startDate.getTime() + duration);
}
Date currentDate = new Date();
if(endDate.before(currentDate))
{
cal.set(Calendar.DAY_OF_WEEK, allowedDaysOfWeek.get(0));
endDate = new Date(cal.getTimeInMillis() + duration);
long offset = (currentDate.getTime() - endDate.getTime());
//1 week = 604800000 is milliseconds
offset -= offset % (period * 604800000);
if(endDate.getTime() + offset < currentDate.getTime())
{
cal.add(Calendar.WEEK_OF_YEAR,
(int)(offset / (period * 604800000)));
int i = 1;
while(((cal.getTimeInMillis() + duration)
< (currentDate.getTime())))
{
if(i == allowedDaysOfWeek.size())
{
cal.add(Calendar.WEEK_OF_YEAR, period);
i = 0;
}
cal.set(Calendar.DAY_OF_WEEK, allowedDaysOfWeek.get(i));
i++;
}
startDate = cal.getTime();
}
else
{
startDate = new Date(cal.getTimeInMillis() + offset);
}
}
cal.setTime(startDate);
Calendar cal2 = (Calendar) cal.clone();
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
dayOfWeek = cal.get(Calendar.DAY_OF_WEEK);
index = allowedDaysOfWeek.indexOf(dayOfWeek) + 1;
while(deletedInstances.contains(cal.getTime()))
{
if(index >= allowedDaysOfWeek.size())
{
index = 0;
cal.add(Calendar.WEEK_OF_YEAR, period);
cal2.add(Calendar.WEEK_OF_YEAR, period);
}
cal.set(Calendar.DAY_OF_WEEK, allowedDaysOfWeek.get(index));
cal2.set(Calendar.DAY_OF_WEEK, allowedDaysOfWeek.get(index));
index++;
}
startDate = cal2.getTime();
endDate = new Date(startDate.getTime() + duration);
if(dateOutOfRange(endDate))
return null;
if(startDate.before(currentDate))
{
executeNow = true;
}
return new CalendarItemTimerTask(
sourceTask.getStatus(),
startDate, endDate, sourceTask.getId(), executeNow, this);
}
case Month:
case MonthEnd:
case HjMonth:
case HjMonthEnd:
{
return nextMonth(startDate, endDate, false);
}
case MonthNth:
case HjMonthNth:
{
if(patternSpecific1 == 0x7f && patternSpecific2 == 0x05)
return nextMonth(startDate, endDate, true);
else
return nextMonthN(startDate, endDate);
}
}
return null;
}
/**
* Finds the occurrence of the events in the next months
* @param cal the calendar object
* @param lastDay if <tt>true</tt> it will return the last day of the month
* @param period the number of months to add
* @return the calendar object with set date
*/
private Calendar incrementMonths(Calendar cal, boolean lastDay,
int period)
{
int dayOfMonth = patternSpecific1;
cal.set(Calendar.DAY_OF_MONTH, 1);
cal.add(Calendar.MONTH, period);
if(lastDay
|| (cal.getActualMaximum(Calendar.DAY_OF_MONTH) < dayOfMonth))
dayOfMonth = cal.getActualMaximum(Calendar.DAY_OF_MONTH);
cal.set(Calendar.DAY_OF_MONTH, dayOfMonth);
return cal;
}
    /**
     * Finds the next occurrence for monthly recurrence.
     * @param startDate the start date of the previous calendar item.
     * @param endDate the end date of the previous calendar item.
     * @param lastDay if <tt>true</tt> we are interested in last day of the
     * month
     * @return the next item, or null if the series has ended
     */
    public CalendarItemTimerTask nextMonth(Date startDate, Date endDate,
        boolean lastDay)
    {
        // Duration of a single occurrence, taken from the source item.
        long duration = sourceTask.getEndDate().getTime()
            - sourceTask.getStartDate().getTime();
        Calendar cal = Calendar.getInstance();
        cal.setTime(startDate);
        cal = incrementMonths(cal, lastDay, period);
        Date currentDate = new Date();
        if(cal.getTimeInMillis() + duration < currentDate.getTime())
        {
            // The next occurrence is already over: jump forward by whole
            // periods (computed in months) to land near "now" instead of
            // stepping one period at a time.
            Calendar cal2 = Calendar.getInstance();
            cal2.setTime(currentDate);
            int years
                = cal2.get(Calendar.YEAR) - cal.get(Calendar.YEAR);
            int months = (years * 12)
                + (cal2.get(Calendar.MONTH) - cal.get(Calendar.MONTH));
            int monthsToAdd = months;
            // Round down to a whole number of periods.
            monthsToAdd -= months % period;
            cal = incrementMonths(cal, lastDay, monthsToAdd);
            if(cal.getTimeInMillis() + duration < currentDate.getTime())
            {
                // Still in the past after rounding down: one more period.
                cal = incrementMonths(cal, lastDay, period);
            }
        }
        // cal is normalized to midnight because deletedInstances holds
        // date-only keys; cal2 keeps the real time of day.
        Calendar cal2 = (Calendar) cal.clone();
        cal.set(Calendar.HOUR_OF_DAY, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.SECOND, 0);
        cal.set(Calendar.MILLISECOND, 0);
        // Skip occurrences the user has deleted from the series.
        while(deletedInstances.contains(cal.getTime()))
        {
            cal = incrementMonths(cal, lastDay, period);
            cal2 = incrementMonths(cal2, lastDay, period);
        }
        startDate = cal2.getTime();
        endDate = new Date(startDate.getTime() + duration);
        if(dateOutOfRange(endDate))
        {
            return null;
        }
        // An occurrence that already started must be fired immediately.
        boolean executeNow = startDate.before(currentDate);
        return new CalendarItemTimerTask(
            sourceTask.getStatus(),
            startDate, endDate, sourceTask.getId(), executeNow, this);
    }
/**
* Finds the occurrence of the events in the next months
* @param startDate the start date if the calendar item
* @param dayOfWeekInMonth the number of week days occurrences
* @return the date of the next occurrence
*/
private Date getMonthNStartDate(Date startDate, int dayOfWeekInMonth)
{
Calendar cal = Calendar.getInstance();
cal.setTime(startDate);
if(dayOfWeekInMonth == -1)
{
Date result = null;
cal.set(Calendar.DAY_OF_WEEK_IN_MONTH, dayOfWeekInMonth);
for(int day : allowedDaysOfWeek)
{
cal.set(Calendar.DAY_OF_WEEK, day);
if(result == null || result.before(cal.getTime()))
result = cal.getTime();
}
return result;
}
else
while(dayOfWeekInMonth > 0)
{
int dayOfWeek = cal.get(Calendar.DAY_OF_WEEK);
if(allowedDaysOfWeek.contains(dayOfWeek))
dayOfWeekInMonth--;
if(dayOfWeekInMonth > 0)
cal.add(Calendar.DAY_OF_MONTH, 1);
}
return cal.getTime();
}
/**
* Finds the next occurrence for monthly Nth recurrence.
* @param startDate the start date of the previous calendar item.
* @param endDate the end date of the previous calendar item.
* @return the next item
*/
public CalendarItemTimerTask nextMonthN(Date startDate, Date endDate)
{
int dayOfWeekInMonth = (patternSpecific2 == 5? -1 : patternSpecific2);
long duration = sourceTask.getEndDate().getTime()
- sourceTask.getStartDate().getTime();
Calendar cal = Calendar.getInstance();
cal.setTime(startDate);
cal.set(Calendar.DAY_OF_MONTH, 1);
cal.add(Calendar.MONTH, period);
cal.setTime(getMonthNStartDate(cal.getTime(), dayOfWeekInMonth));
Date currentDate = new Date();
if(cal.getTimeInMillis() + duration < currentDate.getTime())
{
Calendar cal2 = Calendar.getInstance();
cal2.setTime(currentDate);
int years
= cal2.get(Calendar.YEAR) - cal.get(Calendar.YEAR);
int months = (years * 12)
+ (cal2.get(Calendar.MONTH) - cal.get(Calendar.MONTH));
int monthsToAdd = months;
monthsToAdd -= months % period;
cal.set(Calendar.DAY_OF_MONTH, 1);
cal.add(Calendar.MONTH, monthsToAdd);
cal.setTime(getMonthNStartDate(cal.getTime(), dayOfWeekInMonth));
if(cal.getTimeInMillis() + duration < currentDate.getTime())
{
cal.set(Calendar.DAY_OF_MONTH, 1);
cal.add(Calendar.MONTH, monthsToAdd);
cal.setTime(getMonthNStartDate(cal.getTime(), dayOfWeekInMonth));
}
}
Calendar cal2 = (Calendar) cal.clone();
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
while(deletedInstances.contains(cal.getTime()))
{
cal.set(Calendar.DAY_OF_MONTH, 1);
cal.add(Calendar.MONTH, period);
startDate = null;
for(int dayOfWeek : allowedDaysOfWeek)
{
cal.set(Calendar.DAY_OF_WEEK, dayOfWeek);
cal.set(Calendar.DAY_OF_WEEK_IN_MONTH,dayOfWeekInMonth);
if((cal.after(startDate) && dayOfWeekInMonth == -1)
|| (cal.before(startDate) && dayOfWeekInMonth != -1)
|| startDate == null)
{
startDate = cal.getTime();
cal2.set(Calendar.YEAR,cal.get(Calendar.YEAR));
cal2.set(Calendar.MONTH,cal.get(Calendar.MONTH));
cal2.set(Calendar.DATE,cal.get(Calendar.DATE));
}
}
}
startDate = cal2.getTime();
endDate = new Date(startDate.getTime() + duration);
if(dateOutOfRange(endDate))
return null;
boolean executeNow = false;
if(startDate.before(currentDate))
{
executeNow = true;
}
return new CalendarItemTimerTask(
sourceTask.getStatus(),
startDate, endDate, sourceTask.getId(), executeNow, this);
}
/**
* Represents the exception info structure.
*/
public class ExceptionInfo
{
/**
* The start date of the exception.
*/
private final Date startDate;
/**
* The end date of the exception.
*/
private final Date endDate;
/**
* The original start date of the exception.
*/
private final Date originalStartDate;
/**
* The modified flags of the exception.
*/
private final short overrideFlags;
/**
* The new busy status of the exception.
*/
private CalendarService.BusyStatusEnum busyStatus;
/**
* The size of the fixed fields.
*/
private int size = 22;
/**
* Parses the data of the exception.
* @param offset the position where the exception starts in the binary
* data
*/
public ExceptionInfo(int offset)
{
startDate = windowsTimeToDateObject(dataBuffer.getInt(offset));
offset += 4;
endDate = windowsTimeToDateObject(dataBuffer.getInt(offset));
offset += 4;
originalStartDate
= windowsTimeToDateObject(dataBuffer.getInt(offset));
offset += 4;
overrideFlags = dataBuffer.getShort(offset);
offset += 2;
int[] fieldMasks = {0x0001, 0x0002, 0x0004, 0x0008, 0x0010,
0x0020, 0x0040, 0x0080};
for(int mask : fieldMasks)
{
if(mask == 0x0020)
{
if((overrideFlags & mask) != 0)
{
busyStatus = CalendarService.BusyStatusEnum.getFromLong(
(long)dataBuffer.getInt(offset));
}
if(busyStatus == null)
{
busyStatus = sourceTask.getStatus();
}
}
if((overrideFlags & mask) != 0)
{
if(mask == 0x0010 || mask == 0x0001)
{
short size = dataBuffer.getShort(offset + 2);
offset += size;
size += size;
}
offset += 4;
size += 4;
}
}
offset += 4;
int reservedBlockSize = dataBuffer.getShort(offset);
size += reservedBlockSize;
}
/**
* Returns the size of the exception
* @return the size of the exception
*/
public int sizeInBytes()
{
return size;
}
/**
* Returns the start date
* @return the start date
*/
public Date getStartDate()
{
return startDate;
}
/**
* Returns the end date
* @return the end date
*/
public Date getEndDate()
{
return endDate;
}
/**
* Returns the busy status
* @return the busy status
*/
public CalendarService.BusyStatusEnum getBusyStatus()
{
return busyStatus;
}
/**
* Prints the properties of the class for debugging purpose.
*/
@Override
public String toString()
{
String result = "";
result += "startDate: "
+ new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z")
.format(startDate) + "\n";
result += "endDate: "
+ new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z")
.format(endDate) + "\n";
result += "originalStartDate: "
+ new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z")
.format(originalStartDate) + "\n";
return result;
}
}
}
| |
/**
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.waveprotocol.wave.model.util;
import static java.util.Arrays.asList;
import junit.framework.TestCase;
import java.util.ArrayList;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
/**
 * Test case for {@link ConcurrentList}.
 *
 * The tests below pin down the iterator contract: mutation during iteration
 * must never throw {@link ConcurrentModificationException}, and each
 * {@code next()} returns the element that {@code hasNext()} last observed.
 * Note (grounded in {@link #testBasicAddAndRemove()}): items are prepended,
 * so iteration order is newest-first.
 */
public class ConcurrentListTest extends TestCase {
  /** Instance being tested. Created in {@link #setUp()}. */
  private ConcurrentList<String> list;
  /** Dummy items used in the tests. */
  private String a;
  private String b;
  private String c;
  private String d;
  private String e;
  @Override
  protected void setUp() throws Exception {
    list = ConcurrentList.create();
    a = "a";
    b = "b";
    c = "c";
    d = "d";
    e = "e";
  }
  /**
   * Creates a list of items in the order returned by iterating over a
   * ConcurrentList.
   *
   * @param xs a ConcurrentList
   * @return a list containing the items returned by iterating over {@code xs}.
   */
  private static <T> List<T> toList(ConcurrentList<T> xs) {
    List<T> list = new ArrayList<T>();
    for (T x : xs) {
      list.add(x);
    }
    return list;
  }
  // Adds prepend (expected order is [b, a] after add(a), add(b)); removes
  // take effect immediately; isEmpty tracks both operations.
  public void testBasicAddAndRemove() {
    assertTrue(list.isEmpty());
    list.add(a);
    assertEquals("initial add failed", asList(a), toList(list));
    assertTrue(!list.isEmpty());
    list.add(b);
    assertEquals("second add failed", asList(b, a), toList(list));
    assertTrue(!list.isEmpty());
    list.remove(a);
    assertEquals("initial remove failed", asList(b), toList(list));
    assertTrue(!list.isEmpty());
    list.remove(b);
    assertEquals("second remove failed", Collections.<String>emptyList(), toList(list));
    assertTrue(list.isEmpty());
  }
  // Adding while iterating must neither throw nor make the loop diverge.
  public void testAddWhileIteratingDoesNotCauseCme() {
    list.add(a);
    list.add(b);
    list.add(c);
    try {
      // We attempt to add d on every iteration.
      for (String x : list) {
        list.add(d);
      }
    } catch (ConcurrentModificationException e) {
      fail("addition during iteration caused CME");
    }
  }
  public void testRemoveWhileIteratingDoesNotCauseCme() {
    list.add(d);
    list.add(c);
    list.add(b);
    list.add(a);
    try {
      Iterator<String> i = list.iterator();
      // (* means iterator reference, [] means deleted
      // pre state: a* b c d
      assertTrue(i.hasNext());
      assertEquals(a, i.next());
      // post state: a b* c d
      // pre state: a b* c d
      list.remove(c);
      assertEquals(asList(a, b, d), toList(list));
      // post state: a b* d
      // pre state: a b* d
      assertTrue(i.hasNext());
      assertEquals(b, i.next());
      // post state: a b d*
      // pre state: a b d*
      list.remove(d);
      assertEquals(asList(a, b), toList(list));
      // post state: a b [d*]
      // Removing the element the iterator points at ends the iteration.
      assertFalse(i.hasNext());
    } catch (ConcurrentModificationException e) {
      fail("removal during iteration caused CME");
    }
  }
  // Interleaves adds and removes; the iterator must skip removed elements
  // and never revisit ones it has already passed (e is re-added at the
  // front, behind the iterator, so it is not yielded again).
  public void testAddAndRemoveWhileIterating() {
    list.add(e);
    list.add(d);
    list.add(c);
    list.add(b);
    list.add(a);
    try {
      Iterator<String> i = list.iterator();
      //
      // pre state: a* b c d e
      assertTrue(i.hasNext());
      assertEquals(a, i.next());
      // post state: a b* c d e
      // pre state: a b* c d e
      list.remove(e);
      list.add(e);
      assertEquals(asList(e, a, b, c, d), toList(list));
      // post state: e a b* c d
      // pre state: e a b* c d
      assertTrue(i.hasNext());
      list.remove(b);
      assertEquals(asList(e, a, c, d), toList(list));
      // post state: e a [b*] c d
      list.remove(c);
      assertEquals(asList(e, a, d), toList(list));
      // post state: e a [b*] [c] d
      assertTrue(i.hasNext());
      assertEquals(d, i.next());
      // post state: e a d
      assertFalse(i.hasNext());
    } catch (ConcurrentModificationException e) {
      fail("mutation during iteration caused CME");
    }
  }
  // hasNext() must be idempotent: repeated calls neither advance the
  // iterator nor change what next() will return.
  public void testHasNextDoesNotAffectNext() {
    list.add(d);
    list.add(c);
    list.add(b);
    list.add(a);
    try {
      Iterator<String> i = list.iterator();
      assertTrue(i.hasNext());
      assertTrue(i.hasNext());
      assertTrue(i.hasNext());
      assertEquals(a, i.next());
      list.remove(b);
      assertTrue(i.hasNext());
      assertTrue(i.hasNext());
      assertTrue(i.hasNext());
      assertEquals(c, i.next());
      assertTrue(i.hasNext());
      assertTrue(i.hasNext());
      assertTrue(i.hasNext());
      assertEquals(d, i.next());
      assertFalse(i.hasNext());
      assertFalse(i.hasNext());
      assertFalse(i.hasNext());
    } catch (ConcurrentModificationException e) {
      fail("mutation during iteration caused CME");
    }
  }
  public void testNextMatchesHasNext() {
    list.add(d);
    list.add(c);
    list.add(b);
    list.add(a);
    try {
      Iterator<String> i = list.iterator();
      // Even if we remove stuff, we expect next() to return what was pointed to at the time
      // of hasNext().
      // pre state: a* b c d
      assertTrue(i.hasNext()); // a
      list.remove(a);
      assertEquals(a, i.next());
      // post state: b* c d
      assertTrue(i.hasNext()); // b
      list.remove(b);
      list.remove(c);
      assertEquals(b, i.next());
      // post state: d*
      assertTrue(i.hasNext()); // d
      assertEquals(d, i.next()); // d
    } catch (ConcurrentModificationException e) {
      fail("mutation during iteration caused CME");
    }
  }
  // isEmpty() must reflect removals immediately, even while an iterator
  // is still live on the (now empty) list.
  public void testRemoveWhileIteratingAffectsIsEmpty() {
    list.add(c);
    list.add(b);
    list.add(a);
    try {
      Iterator<String> i = list.iterator();
      assertFalse(list.isEmpty());
      // pre state: a* b c
      list.remove(c);
      i.next();
      assertFalse(list.isEmpty());
      // post state: a b*
      // pre state: a b*
      list.remove(b);
      list.remove(a);
      assertEquals(Collections.<String>emptyList(), toList(list));
      assertFalse(i.hasNext());
      assertTrue(list.isEmpty());
      // post state: [b*]
    } catch (ConcurrentModificationException e) {
      fail("removal during iteration caused CME");
    }
  }
}
| |
/**
* Copyright (c) 2009/09-2012/08, Regents of the University of Colorado
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* Copyright 2012/09-2013/04, University of Massachusetts Amherst
* Copyright 2013/05-Present, IPSoft Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.clearnlp.clustering;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import com.carrotsearch.hppc.IntOpenHashSet;
import com.carrotsearch.hppc.ObjectIntOpenHashMap;
import com.carrotsearch.hppc.cursors.IntCursor;
import com.googlecode.clearnlp.dependency.DEPTree;
import com.googlecode.clearnlp.pos.POSNode;
import com.googlecode.clearnlp.util.pair.IntDoublePair;
/**
 * K-means clustering over sparse binary lexical feature vectors.
 * @since 1.0.0
 * @author Jinho D. Choi ({@code jdchoi77@gmail.com})
 */
public class Kmeans
{
	/** Fixed seed so centroid initialization is reproducible. */
	private final int RAND_SEED = 0;
	/** K = number of clusters, N = number of units, D = feature dimension. */
	private int K, N, D;
	/** Maps each lexicon string to its feature index. */
	private ObjectIntOpenHashMap<String> m_lexica;
	/** Each unit is a sorted array of feature indices (sparse binary vector). */
	private List<int[]> v_units;
	/** Flattened K x D centroid matrix; row k starts at index k * D. */
	private double[] d_centroid;
	/** Euclidean norm of each centroid row, for cosine similarity. */
	private double[] d_scala;

	public Kmeans()
	{
		m_lexica = new ObjectIntOpenHashMap<String>();
		v_units  = new ArrayList<int[]>();
	}

	/**
	 * Adds one unit described by its set of lexica; unseen lexica are
	 * assigned fresh feature indices.
	 * @param lexica the lexical features of the unit.
	 */
	public void addUnit(Set<String> lexica)
	{
		int index, i = 0, size = lexica.size();
		int[] unit = new int[size];
		for (String lexicon : lexica)
		{
			if (m_lexica.containsKey(lexicon))
			{
				index = m_lexica.get(lexicon);
			}
			else
			{
				index = m_lexica.size();
				m_lexica.put(lexicon, index);
			}
			unit[i++] = index;
		}
		Arrays.sort(unit);
		v_units.add(unit);
	}

	/**
	 * Adds one unit built from the lemmas of the given POS nodes.
	 * @param nodes the POS-tagged tokens of the unit.
	 */
	public void addUnit(POSNode[] nodes)
	{
		Set<String> lexica = new HashSet<String>();
		for (POSNode node : nodes)
			lexica.add(node.lemma);
		addUnit(lexica);
	}

	/**
	 * Adds one unit built from the lemmas of the given dependency tree
	 * (index 0 is the artificial root and is skipped).
	 * @param tree the dependency tree of the unit.
	 */
	public void addUnit(DEPTree tree)
	{
		Set<String> lexica = new HashSet<String>();
		int i, size = tree.size();
		for (i=1; i<size; i++)
			lexica.add(tree.get(i).lemma);
		addUnit(lexica);
	}

	/**
	 * K-means clustering.
	 * @param k the number of clusters.
	 * @param threshold minimum RSS; iteration stops once the averaged
	 * similarity reaches it.
	 * @return each row represents a cluster, and
	 * each column represents a pair of (index of a unit vector, similarity to the centroid).
	 */
	public List<List<IntDoublePair>> cluster(int k, double threshold)
	{
		List<List<IntDoublePair>> currCluster = null;
		List<List<IntDoublePair>> prevCluster = null;
		double prevRss = -1, currRss;
		K = k;
		N = v_units.size();
		D = m_lexica.size();
		initCentroids();
		int iter, max = N / K;
		for (iter=0; iter<max; iter++)
		{
			System.out.printf("===== Iteration: %d =====\n", iter);
			currCluster = getClusters();
			updateCentroids(currCluster);
			currRss = getRSS(currCluster);
			// Stop when the objective no longer improves, keeping the
			// better (previous) clustering.
			if (prevRss >= currRss)	return prevCluster;
			if (currRss >= threshold)	break;
			prevRss     = currRss;
			prevCluster = currCluster;
		}
		return currCluster;
	}

	/** Initializes random centroids from K distinct units. */
	private void initCentroids()
	{
		// NOTE(review): loops forever if K > N -- confirm callers guarantee
		// at least K units.
		IntOpenHashSet set = new IntOpenHashSet();
		Random rand = new Random(RAND_SEED);
		d_centroid = new double[K*D];
		d_scala    = new double[K];
		while (set.size() < K)
			set.add(rand.nextInt(N));
		int[] unit;
		int k = 0;
		for (IntCursor cur : set)
		{
			unit = v_units.get(cur.value);
			for (int index : unit)
				d_centroid[getCentroidIndex(k, index)] = 1;
			// Binary vector: the norm is sqrt(number of set features).
			d_scala[k++] = Math.sqrt(unit.length);
		}
	}

	/** Recomputes the centroid (mean vector) and norm of each cluster. */
	private void updateCentroids(List<List<IntDoublePair>> cluster)
	{
		List<IntDoublePair> ck;
		int i, k, size;
		double scala;
		Arrays.fill(d_centroid, 0);
		Arrays.fill(d_scala   , 0);
		System.out.print("Updating centroids: ");
		for (k=0; k<K; k++)
		{
			ck = cluster.get(k);
			for (IntDoublePair p : ck)
			{
				for (int index : v_units.get(p.i))
					d_centroid[getCentroidIndex(k, index)] += 1;
			}
			size  = ck.size();
			scala = 0;
			for (i=k*D; i<(k+1)*D; i++)
			{
				if (d_centroid[i] > 0)
				{
					d_centroid[i] /= size;
					scala += d_centroid[i] * d_centroid[i];
				}
			}
			d_scala[k] = Math.sqrt(scala);
			System.out.print(".");
		}
		System.out.println();
	}

	/**
	 * Assigns every unit to its most similar centroid.
	 * Each cluster contains indices of {@link Kmeans#v_units}.
	 */
	private List<List<IntDoublePair>> getClusters()
	{
		List<List<IntDoublePair>> cluster = new ArrayList<List<IntDoublePair>>(K);
		IntDoublePair max = new IntDoublePair(-1, -1);
		int[] unit;
		int i, k; double sim;
		for (k=0; k<K; k++)
			cluster.add(new ArrayList<IntDoublePair>());
		System.out.print("Clustering: ");
		for (i=0; i<N; i++)
		{
			unit = v_units.get(i);
			max.set(-1, -1);
			for (k=0; k<K; k++)
			{
				if ((sim = cosine(unit, k)) > max.d)
					max.set(k, sim);
			}
			cluster.get(max.i).add(new IntDoublePair(i, max.d));
			if (i%10000 == 0)	System.out.print(".");
		}
		System.out.println();
		for (k=0; k<K; k++)
			System.out.printf("- %4d: %d\n", k, cluster.get(k).size());
		return cluster;
	}

	/**
	 * @param k [0, K-1].
	 * @param index [0, D-1].
	 * @return the flat index of feature {@code index} in centroid {@code k}.
	 */
	private int getCentroidIndex(int k, int index)
	{
		return k * D + index;
	}

	/**
	 * @return the average similarity of every unit to its assigned centroid.
	 */
	private double getRSS(List<List<IntDoublePair>> cluster)
	{
		double sim = 0;
		System.out.print("Calulating RSS: ");
		for (int k=0; k<K; k++)
		{
			for (IntDoublePair tup : cluster.get(k))
				sim += cosine(v_units.get(tup.i), k);
			System.out.print(".");
		}
		System.out.println();
		sim /= N;
		System.out.println("RSS = "+sim);
		// Return the same averaged value that was just printed; the
		// previous code returned sim / N here, dividing by N a second
		// time and making the returned RSS disagree with the log and
		// with the threshold semantics of cluster().
		return sim;
	}

	/** @return cosine similarity between a binary unit vector and centroid k. */
	private double cosine(int[] unit, int k)
	{
		double dot = 0;
		for (int index : unit)
			dot += d_centroid[getCentroidIndex(k, index)];
		return dot / (Math.sqrt(unit.length) * d_scala[k]);
	}
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.ide.intellij.deprecated;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import com.facebook.buck.ide.intellij.ProjectIntegrationTestUtils;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.ProjectWorkspace.ProcessResult;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSortedSet;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
public class DeprecatedProjectIntegrationTest {
@Rule
public TemporaryPaths temporaryFolder = new TemporaryPaths();
@Test
public void testBuckProject() throws IOException {
ProcessResult result = runBuckProjectAndVerify(
"project1",
"--deprecated-ij-generation",
"-v",
"5");
assertEquals(
"`buck project` should report the files it modified.",
Joiner.on('\n').join(
"MODIFIED FILES:",
".idea/compiler.xml",
".idea/libraries/buck_out_gen_libs___generated___generated_jar.xml",
".idea/libraries/buck_out_gen_libs___guava___guava_jar.xml",
".idea/libraries/buck_out_gen_libs___jsr305___jsr305_jar.xml",
".idea/libraries/buck_out_gen_libs___junit___junit_jar.xml",
".idea/misc.xml",
".idea/modules.xml",
".idea/runConfigurations/Debug_Buck_test.xml",
"modules/dep1/module_modules_dep1.iml",
"modules/tip/module_modules_tip.iml",
"root.iml"
) + '\n',
result.getStdout());
assertThat(
"`buck project` should contain warning to synchronize IntelliJ.",
result.getStderr(),
containsString(
" :: Please resynchronize IntelliJ via File->Synchronize " +
"or Cmd-Opt-Y (Mac) or Ctrl-Alt-Y (PC/Linux)"));
}
  @Test
  public void testBuckProjectWithGenruleAsASrc() throws IOException {
    // Regression check: project generation must complete and pass workspace
    // verification when the scenario uses a genrule as a src. No stdout or
    // stderr assertions are made here.
    runBuckProjectAndVerify(
        "project_with_genrule_as_a_src",
        "--deprecated-ij-generation",
        "-v",
        "5");
  }
@Test
public void testBuckProjectDoesNotCauseUnnecessaryWrites() throws IOException {
ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
this, "project_with_root_iml_already_present", temporaryFolder);
workspace.setUp();
FileTime lastModified = FileTime.fromMillis(0); // We're gonna party like it's 1970!
Path path = temporaryFolder.getRoot().resolve("root.iml");
Files.setLastModifiedTime(path, lastModified);
assertEquals(lastModified, Files.getLastModifiedTime(path));
ProcessResult result = workspace.runBuckCommand(
"project",
"--deprecated-ij-generation");
result.assertSuccess("buck project should exit cleanly");
assertEquals(lastModified, Files.getLastModifiedTime(path));
workspace.verify();
}
@Test
public void testBuckProjectDryRun() throws IOException {
ProcessResult result = runBuckProjectWithoutVerification(
"project1",
"--deprecated-ij-generation",
"--dry-run",
"-v",
"5");
ImmutableSortedSet<String> expectedResult = ImmutableSortedSet.of(
"//:project_config",
"//:root_module",
"//libs:generated",
"//libs:generated_jar",
"//libs:generated_source_jar",
"//libs:guava",
"//libs:jsr305",
"//libs:junit",
"//modules/dep1:dep1",
"//modules/dep1:project_config",
"//modules/dep1:test",
"//modules/tip:project_config",
"//modules/tip:test",
"//modules/tip:tip");
ImmutableSortedSet<String> actualResult = ImmutableSortedSet.copyOf(
Splitter.on('\n').omitEmptyStrings().split(result.getStdout()));
assertEquals(
"`buck project --dry-run` should print the list of targets that would be included.",
expectedResult,
actualResult);
}
  @Test
  public void testBuckProjectExcludesSubdirectories() throws IOException {
    // Relies entirely on workspace verification of the "project2" scenario;
    // no stdout/stderr assertions are made here.
    runBuckProjectAndVerify("project2", "--deprecated-ij-generation");
  }
/**
* Verify that if we build a project by specifying a target, the resulting project only contains
* the transitive deps of that target. In this example, that means everything except
* //modules/tip and //tests/tests.
*/
@Test
public void testBuckProjectSlice() throws IOException {
ProcessResult result = runBuckProjectAndVerify(
"project_slice",
"--deprecated-ij-generation",
"--without-tests",
"//modules/dep1:dep1",
"//:root",
"-v", "5");
assertEquals(
"`buck project` should report the files it modified.",
Joiner.on('\n').join(
"MODIFIED FILES:",
".idea/compiler.xml",
".idea/libraries/buck_out_gen_libs___guava___guava_jar.xml",
".idea/libraries/buck_out_gen_libs___jsr305___jsr305_jar.xml",
".idea/libraries/buck_out_gen_libs___junit___junit_jar.xml",
".idea/misc.xml",
".idea/modules.xml",
".idea/runConfigurations/Debug_Buck_test.xml",
"module_.iml",
"modules/dep1/module_modules_dep1.iml"
) + '\n',
result.getStdout());
assertThat(
"`buck project` should contain warning to synchronize IntelliJ.",
result.getStderr(),
containsString(
" :: Please resynchronize IntelliJ via File->Synchronize " +
"or Cmd-Opt-Y (Mac) or Ctrl-Alt-Y (PC/Linux)"));
}
@Test
public void testBuckProjectSliceDryRun() throws IOException {
ProcessResult result = runBuckProjectWithoutVerification(
"project_slice",
"--deprecated-ij-generation",
"--dry-run",
"--without-tests",
"//modules/dep1:dep1",
"//:root",
"-v", "5");
ImmutableSortedSet<String> expectedResult = ImmutableSortedSet.of(
"//:project_config",
"//:root",
"//libs:guava",
"//libs:jsr305",
"//libs:junit",
"//modules/dep1:dep1",
"//modules/dep1:project_config",
"//modules/dep1:test");
ImmutableSortedSet<String> actualResult = ImmutableSortedSet.copyOf(
Splitter.on('\n').omitEmptyStrings().split(result.getStdout()));
assertEquals(
"`buck project --dry-run` should print the list of targets that would be included.",
expectedResult,
actualResult);
}
/**
* Verify we can build a project by specifying a target, even if it depends on a target whose
* project is not in the same buck file as the targets it's for.
*/
@Test
public void testBuckProjectSliceWithProjectInDifferentBuckFile() throws IOException {
ProcessResult result = runBuckProjectAndVerify(
"project_slice_with_project_in_different_buck_file",
"--deprecated-ij-generation",
"//:root",
"-v",
"5");
assertEquals(
"`buck project` should report the files it modified.",
Joiner.on('\n').join(
"MODIFIED FILES:",
".idea/compiler.xml",
".idea/misc.xml",
".idea/modules.xml",
".idea/runConfigurations/Debug_Buck_test.xml",
"module_.iml",
"modules/module_modules_dep1.iml"
) + '\n',
result.getStdout());
assertThat(
"`buck project` should contain warning to synchronize IntelliJ.",
result.getStderr(),
containsString(
" :: Please resynchronize IntelliJ via File->Synchronize " +
"or Cmd-Opt-Y (Mac) or Ctrl-Alt-Y (PC/Linux)"));
}
/**
* Verify that if we build a project by specifying a target, the resulting project only contains
* the transitive deps of that target as well as any tests that specify something in those
* transitive deps as "sources_under_test". In this example, that means everything except
* //modules/tip.
*/
@Test
public void testBuckProjectSliceWithTests() throws IOException {
ProcessResult result = runBuckProjectAndVerify(
"project_slice_with_tests",
"--deprecated-ij-generation",
"//modules/dep1:dep1",
"-v", "5");
assertEquals(
"`buck project` should report the files it modified.",
Joiner.on('\n').join(
"MODIFIED FILES:",
".idea/compiler.xml",
".idea/libraries/buck_out_gen_libs___guava___guava_jar.xml",
".idea/libraries/buck_out_gen_libs___jsr305___jsr305_jar.xml",
".idea/libraries/buck_out_gen_libs___junit___junit_jar.xml",
".idea/misc.xml",
".idea/modules.xml",
".idea/runConfigurations/Debug_Buck_test.xml",
"modules/dep1/module_modules_dep1.iml",
"tests/module_tests.iml"
) + '\n',
result.getStdout());
assertThat(
"`buck project` should contain warning to synchronize IntelliJ.",
result.getStderr(),
containsString(
" :: Please resynchronize IntelliJ via File->Synchronize " +
"or Cmd-Opt-Y (Mac) or Ctrl-Alt-Y (PC/Linux)"));
}
@Test
public void testBuckProjectSliceWithTestsDryRunShowsNoTests() throws IOException {
  ProcessResult result = runBuckProjectWithoutVerification(
      "project_slice_with_tests",
      "--deprecated-ij-generation",
      "--dry-run",
      "--without-tests",
      "//modules/dep1:dep1",
      "-v", "5");

  // A dry run prints the targets that would be included, one per line; compare as
  // sorted sets so ordering of the output does not matter.
  ImmutableSortedSet<String> expectedTargets = ImmutableSortedSet.of(
      "//libs:guava",
      "//libs:jsr305",
      "//libs:junit",
      "//modules/dep1:dep1",
      "//modules/dep1:project_config",
      "//modules/dep1:test");
  ImmutableSortedSet<String> reportedTargets = ImmutableSortedSet.copyOf(
      Splitter.on('\n').omitEmptyStrings().split(result.getStdout()));
  assertEquals(
      "`buck project --dry-run` should print the list of targets that would be included.",
      expectedTargets,
      reportedTargets);
}
/**
 * Verify that when a project slice is built from a target, the dependencies of its tests
 * are referenced even when they live in a buck file that would otherwise never be parsed.
 */
@Test
public void testBuckProjectSliceWithTestsDependenciesInDifferentBuckFile() throws IOException {
  ProcessResult result = runBuckProjectAndVerify(
      "project_slice_with_tests_dependencies_in_different_buck_file",
      "--deprecated-ij-generation",
      "//modules/dep1:dep1",
      "-v", "5");

  // dep2 is only reachable through the tests, yet its module file must be generated.
  String expectedReport = String.join(
      "\n",
      "MODIFIED FILES:",
      ".idea/compiler.xml",
      ".idea/misc.xml",
      ".idea/modules.xml",
      ".idea/runConfigurations/Debug_Buck_test.xml",
      "modules/dep1/module_modules_dep1.iml",
      "modules/dep2/module_modules_dep2.iml",
      "tests/module_tests.iml") + '\n';
  assertEquals(
      "`buck project` should report the files it modified.",
      expectedReport,
      result.getStdout());

  assertThat(
      "`buck project` should contain warning to synchronize IntelliJ.",
      result.getStderr(),
      containsString(
          " :: Please resynchronize IntelliJ via File->Synchronize " +
          "or Cmd-Opt-Y (Mac) or Ctrl-Alt-Y (PC/Linux)"));
}
/**
 * Verify that when a project slice is built from a target, the project rules of its tests
 * are referenced even when they are defined in a different buck file from the tests.
 */
@Test
public void testBuckProjectSliceWithTestsProjectInDifferentBuckFile() throws IOException {
  ProcessResult result = runBuckProjectAndVerify(
      "project_slice_with_tests_project_in_different_buck_file",
      "--deprecated-ij-generation",
      "//modules/dep1:dep1",
      "-v", "5");

  String expectedReport = String.join(
      "\n",
      "MODIFIED FILES:",
      ".idea/compiler.xml",
      ".idea/misc.xml",
      ".idea/modules.xml",
      ".idea/runConfigurations/Debug_Buck_test.xml",
      "modules/dep1/module_modules_dep1.iml",
      "tests/module_tests_test1.iml") + '\n';
  assertEquals(
      "`buck project` should report the files it modified.",
      expectedReport,
      result.getStdout());

  assertThat(
      "`buck project` should contain warning to synchronize IntelliJ.",
      result.getStderr(),
      containsString(
          " :: Please resynchronize IntelliJ via File->Synchronize " +
          "or Cmd-Opt-Y (Mac) or Ctrl-Alt-Y (PC/Linux)"));
}
/**
 * Tests the case where a build file has a test rule that depends on a library rule in the same
 * build file, and the test rule is specified as the {@code test_target} in its
 * {@code project_config()}. When this happens, all libraries in the generated {@code .iml} file
 * should be listed before any of the modules.
 * <p>
 * This prevents a regression where JUnit was not being loaded early enough in the classpath,
 * which led to a "JUnit version 3.8 or later expected" error when running tests in IntelliJ.
 * (Presumably, IntelliJ ended up loading JUnit 3 from android.jar instead of loading JUnit 4
 * from the version of JUnit checked into version control.)
 */
@Test
public void testBuckProjectWithMultipleLibrariesInOneBuildFile() throws IOException {
  // The assertion is the helper's verification of the generated files against the
  // scenario's expected output; no extra checks on stdout/stderr are needed here.
  runBuckProjectAndVerify(
      "buck_project_multiple_libraries_in_one_build_file",
      "--deprecated-ij-generation");
}
@Test
public void testProjectWithColon() throws IOException {
  // "//modules/dep1:" is a build-target pattern for every rule in that build file;
  // this only checks that the command succeeds, not the generated output.
  runBuckProjectWithoutVerification(
      "project1",
      "--deprecated-ij-generation",
      "//modules/dep1:");
}
@Test
public void testNonexistentTarget() throws IOException {
  // Run `buck project` against a target name that does not exist in an otherwise
  // valid build file and expect a failure with a precise diagnostic.
  ProjectWorkspace projectWorkspace = TestDataHelper.createProjectWorkspaceForScenario(
      this,
      "project1",
      temporaryFolder);
  projectWorkspace.setUp();

  ProcessResult processResult = projectWorkspace.runBuckCommand(
      "project",
      "--deprecated-ij-generation",
      "//modules/dep1:nonexistent-target");

  processResult.assertFailure(
      "No rule found when resolving target //modules/dep1:nonexistent-target " +
      "in build file //modules/dep1/BUCK");
}
@Test
public void testNonexistentBuckFile() throws IOException {
  // Run `buck project` against a path with no BUCK file at all and expect a
  // failure that names the missing build file.
  ProjectWorkspace projectWorkspace = TestDataHelper.createProjectWorkspaceForScenario(
      this,
      "project1",
      temporaryFolder);
  projectWorkspace.setUp();

  ProcessResult processResult = projectWorkspace.runBuckCommand(
      "project",
      "--deprecated-ij-generation",
      "//nonexistent/path:target");

  processResult.assertFailure(
      "No build file at nonexistent/path/BUCK " +
      "when resolving target //nonexistent/path:target.");
}
@Test
public void testBuckProjectGeneratedWithRDotFiles001() throws IOException {
  // Scenario covering R.java handling with the default (enabled) R.java IDEA
  // generator; verification against expected files is done by the helper.
  runBuckProjectAndVerify(
      "project_r_001",
      "--deprecated-ij-generation",
      "app");
}
@Test
public void testBuckProjectGeneratedWithRDotFiles002() throws IOException {
  // Same family of scenarios as testBuckProjectGeneratedWithRDotFiles001, but with
  // the R.java IDEA generator explicitly disabled.
  runBuckProjectAndVerify(
      "project_r_002",
      "--deprecated-ij-generation",
      "--disable-r-java-idea-generator",
      "app");
}
@Test
public void testBuckProjectWithAndroidBinary() throws IOException {
  ProcessResult result = runBuckProjectAndVerify(
      "project_with_android_binary",
      "--deprecated-ij-generation",
      "-v",
      "5");

  // Whole-project generation: every app, library, and resource module gets an .iml.
  String expectedReport = String.join(
      "\n",
      "MODIFIED FILES:",
      ".idea/compiler.xml",
      ".idea/misc.xml",
      ".idea/modules.xml",
      ".idea/runConfigurations/Debug_Buck_test.xml",
      "apps/sample/module_apps_sample.iml",
      "java/com/sample/lib/module_java_com_sample_lib.iml",
      "res/com/sample/asset_only/module_res_com_sample_asset_only.iml",
      "res/com/sample/base/module_res_com_sample_base.iml",
      "res/com/sample/title/module_res_com_sample_title.iml",
      "res/com/sample/top/module_res_com_sample_top.iml") + '\n';
  assertEquals(
      "`buck project` should report the files it modified.",
      expectedReport,
      result.getStdout());
}
@Test
public void testBuckProjectSliceWithAndroidBinary() throws IOException {
  ProcessResult result = runBuckProjectAndVerify(
      "project_with_android_binary",
      "--deprecated-ij-generation",
      "-v", "5",
      "//apps/sample:app");

  // Slicing on //apps/sample:app yields the same modules as the full project here,
  // since the app's transitive deps cover every module in the scenario.
  String expectedReport = String.join(
      "\n",
      "MODIFIED FILES:",
      ".idea/compiler.xml",
      ".idea/misc.xml",
      ".idea/modules.xml",
      ".idea/runConfigurations/Debug_Buck_test.xml",
      "apps/sample/module_apps_sample.iml",
      "java/com/sample/lib/module_java_com_sample_lib.iml",
      "res/com/sample/asset_only/module_res_com_sample_asset_only.iml",
      "res/com/sample/base/module_res_com_sample_base.iml",
      "res/com/sample/title/module_res_com_sample_title.iml",
      "res/com/sample/top/module_res_com_sample_top.iml") + '\n';
  assertEquals(
      "`buck project` should report the files it modified.",
      expectedReport,
      result.getStdout());
}
@Test
public void testBuckProjectWithAndroidBinaryWithRDotJavaAutogenerationDisabled()
    throws IOException {
  ProcessResult result = runBuckProjectAndVerify(
      "project_with_android_binary_autogeneration_disabled",
      "--deprecated-ij-generation",
      "--disable-r-java-idea-generator",
      "-v", "5");

  // Same module set as the enabled case; the flag's effect is verified through the
  // scenario's expected file contents, not through this list.
  String expectedReport = String.join(
      "\n",
      "MODIFIED FILES:",
      ".idea/compiler.xml",
      ".idea/misc.xml",
      ".idea/modules.xml",
      ".idea/runConfigurations/Debug_Buck_test.xml",
      "apps/sample/module_apps_sample.iml",
      "java/com/sample/lib/module_java_com_sample_lib.iml",
      "res/com/sample/asset_only/module_res_com_sample_asset_only.iml",
      "res/com/sample/base/module_res_com_sample_base.iml",
      "res/com/sample/title/module_res_com_sample_title.iml",
      "res/com/sample/top/module_res_com_sample_top.iml") + '\n';
  assertEquals(
      "`buck project` should report the files it modified.",
      expectedReport,
      result.getStdout());
}
@Test
public void testBuckProjectSliceWithAndroidBinaryWithRDotJavaAutogenerationDisabled()
    throws IOException {
  ProcessResult result = runBuckProjectAndVerify(
      "project_with_android_binary_autogeneration_disabled",
      "--deprecated-ij-generation",
      "--disable-r-java-idea-generator",
      "//apps/sample:app",
      "-v", "5");

  // Slice variant of the autogeneration-disabled scenario; the app's transitive
  // deps again cover every module, so the reported file list is unchanged.
  String expectedReport = String.join(
      "\n",
      "MODIFIED FILES:",
      ".idea/compiler.xml",
      ".idea/misc.xml",
      ".idea/modules.xml",
      ".idea/runConfigurations/Debug_Buck_test.xml",
      "apps/sample/module_apps_sample.iml",
      "java/com/sample/lib/module_java_com_sample_lib.iml",
      "res/com/sample/asset_only/module_res_com_sample_asset_only.iml",
      "res/com/sample/base/module_res_com_sample_base.iml",
      "res/com/sample/title/module_res_com_sample_title.iml",
      "res/com/sample/top/module_res_com_sample_top.iml") + '\n';
  assertEquals(
      "`buck project` should report the files it modified.",
      expectedReport,
      result.getStdout());
}
@Test
public void testAndroidProjectGeneratedWithGradleConventions() throws IOException {
  // Android scenario whose sources follow Gradle directory conventions; the
  // generated project is verified against the scenario's expected files.
  runBuckProjectAndVerify(
      "android_project_with_gradle_conventions",
      "--deprecated-ij-generation",
      "app");
}
/**
 * Runs `buck project` in the given test-data scenario and returns the result.
 * The {@code true} argument presumably enables verification of the generated
 * files against the scenario's expected output (contrast with
 * {@link #runBuckProjectWithoutVerification}) — semantics live in
 * ProjectIntegrationTestUtils.runBuckProject.
 *
 * @param folderWithTestData name of the scenario folder under the test data
 * @param commandArgs extra arguments appended to the `buck project` invocation
 */
private ProcessResult runBuckProjectAndVerify(
    String folderWithTestData,
    String... commandArgs) throws IOException {
  return ProjectIntegrationTestUtils.runBuckProject(
      this,
      temporaryFolder,
      folderWithTestData,
      true,
      commandArgs);
}
/**
 * Runs `buck project` in the given test-data scenario and returns the result
 * without comparing the generated files to the scenario's expected output
 * (the {@code false} flag — see {@code ProjectIntegrationTestUtils.runBuckProject}).
 *
 * @param folderWithTestData name of the scenario folder under the test data
 * @param commandArgs extra arguments appended to the `buck project` invocation
 */
private ProcessResult runBuckProjectWithoutVerification(
    String folderWithTestData,
    String... commandArgs) throws IOException {
  return ProjectIntegrationTestUtils.runBuckProject(
      this,
      temporaryFolder,
      folderWithTestData,
      false,
      commandArgs);
}
}
| |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.elasticache.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* Container for the parameters to the {@link com.amazonaws.services.elasticache.AmazonElastiCache#describeCacheEngineVersions(DescribeCacheEngineVersionsRequest) DescribeCacheEngineVersions operation}.
* <p>
* The <i>DescribeCacheEngineVersions</i> action returns a list of the
* available cache engines and their versions.
* </p>
*
* @see com.amazonaws.services.elasticache.AmazonElastiCache#describeCacheEngineVersions(DescribeCacheEngineVersionsRequest)
*/
/**
 * Container for the parameters to the DescribeCacheEngineVersions operation, which
 * returns a list of the available cache engines and their versions.
 * <p>
 * All filters are optional. Setters mutate this request; the {@code withXxx} variants
 * additionally return {@code this} so calls can be chained.
 */
public class DescribeCacheEngineVersionsRequest extends AmazonWebServiceRequest implements Serializable, Cloneable {

    /** Cache engine to return. Valid values: {@code memcached} | {@code redis}. */
    private String engine;

    /** Cache engine version to return. Example: {@code 1.4.14}. */
    private String engineVersion;

    /**
     * Name of a specific cache parameter group family to return details for.
     * Constraints: 1 to 255 alphanumeric characters, first character a letter,
     * no trailing hyphen and no two consecutive hyphens.
     */
    private String cacheParameterGroupFamily;

    /**
     * Maximum number of records per response; when more exist, a marker is included
     * so the remainder can be retrieved. Default: 100. Constraints: minimum 20,
     * maximum 100.
     */
    private Integer maxRecords;

    /**
     * Optional pagination marker from a prior request; when set, only records
     * beyond the marker (up to {@code MaxRecords}) are returned.
     */
    private String marker;

    /**
     * If {@code true}, only the default version of the specified engine (or engine
     * and major version combination) is returned.
     */
    private Boolean defaultOnly;

    /** @return the cache engine filter ({@code memcached} | {@code redis}), or null. */
    public String getEngine() {
        return engine;
    }

    /** @param engine the cache engine filter ({@code memcached} | {@code redis}). */
    public void setEngine(String engine) {
        this.engine = engine;
    }

    /**
     * Fluent variant of {@link #setEngine(String)}.
     *
     * @param engine the cache engine filter ({@code memcached} | {@code redis})
     * @return this request, for chaining
     */
    public DescribeCacheEngineVersionsRequest withEngine(String engine) {
        this.engine = engine;
        return this;
    }

    /** @return the cache engine version filter (e.g. {@code 1.4.14}), or null. */
    public String getEngineVersion() {
        return engineVersion;
    }

    /** @param engineVersion the cache engine version filter (e.g. {@code 1.4.14}). */
    public void setEngineVersion(String engineVersion) {
        this.engineVersion = engineVersion;
    }

    /**
     * Fluent variant of {@link #setEngineVersion(String)}.
     *
     * @param engineVersion the cache engine version filter (e.g. {@code 1.4.14})
     * @return this request, for chaining
     */
    public DescribeCacheEngineVersionsRequest withEngineVersion(String engineVersion) {
        this.engineVersion = engineVersion;
        return this;
    }

    /** @return the cache parameter group family filter, or null. */
    public String getCacheParameterGroupFamily() {
        return cacheParameterGroupFamily;
    }

    /**
     * @param cacheParameterGroupFamily the cache parameter group family filter;
     *        1-255 alphanumeric characters, first character a letter, no trailing
     *        hyphen and no two consecutive hyphens
     */
    public void setCacheParameterGroupFamily(String cacheParameterGroupFamily) {
        this.cacheParameterGroupFamily = cacheParameterGroupFamily;
    }

    /**
     * Fluent variant of {@link #setCacheParameterGroupFamily(String)}.
     *
     * @param cacheParameterGroupFamily the cache parameter group family filter
     * @return this request, for chaining
     */
    public DescribeCacheEngineVersionsRequest withCacheParameterGroupFamily(String cacheParameterGroupFamily) {
        this.cacheParameterGroupFamily = cacheParameterGroupFamily;
        return this;
    }

    /** @return the maximum number of records per response (20-100, default 100), or null. */
    public Integer getMaxRecords() {
        return maxRecords;
    }

    /** @param maxRecords the maximum number of records per response (20-100, default 100). */
    public void setMaxRecords(Integer maxRecords) {
        this.maxRecords = maxRecords;
    }

    /**
     * Fluent variant of {@link #setMaxRecords(Integer)}.
     *
     * @param maxRecords the maximum number of records per response (20-100, default 100)
     * @return this request, for chaining
     */
    public DescribeCacheEngineVersionsRequest withMaxRecords(Integer maxRecords) {
        this.maxRecords = maxRecords;
        return this;
    }

    /** @return the pagination marker from a prior request, or null. */
    public String getMarker() {
        return marker;
    }

    /** @param marker the pagination marker returned by a prior request. */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * Fluent variant of {@link #setMarker(String)}.
     *
     * @param marker the pagination marker returned by a prior request
     * @return this request, for chaining
     */
    public DescribeCacheEngineVersionsRequest withMarker(String marker) {
        this.marker = marker;
        return this;
    }

    /** @return whether only the default engine version should be returned, or null. */
    public Boolean isDefaultOnly() {
        return defaultOnly;
    }

    /** @param defaultOnly whether only the default engine version should be returned. */
    public void setDefaultOnly(Boolean defaultOnly) {
        this.defaultOnly = defaultOnly;
    }

    /**
     * Fluent variant of {@link #setDefaultOnly(Boolean)}.
     *
     * @param defaultOnly whether only the default engine version should be returned
     * @return this request, for chaining
     */
    public DescribeCacheEngineVersionsRequest withDefaultOnly(Boolean defaultOnly) {
        this.defaultOnly = defaultOnly;
        return this;
    }

    /**
     * JavaBeans-style alias of {@link #isDefaultOnly()}.
     *
     * @return whether only the default engine version should be returned, or null
     */
    public Boolean getDefaultOnly() {
        return defaultOnly;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     * Only non-null fields are included.
     */
    @Override
    public String toString() {
        // Output format is kept identical to the generated SDK style:
        // "{Field: value,...}" with a trailing comma after every field except the last.
        StringBuilder sb = new StringBuilder("{");
        if (getEngine() != null) sb.append("Engine: ").append(getEngine()).append(",");
        if (getEngineVersion() != null) sb.append("EngineVersion: ").append(getEngineVersion()).append(",");
        if (getCacheParameterGroupFamily() != null) sb.append("CacheParameterGroupFamily: ").append(getCacheParameterGroupFamily()).append(",");
        if (getMaxRecords() != null) sb.append("MaxRecords: ").append(getMaxRecords()).append(",");
        if (getMarker() != null) sb.append("Marker: ").append(getMarker()).append(",");
        if (isDefaultOnly() != null) sb.append("DefaultOnly: ").append(isDefaultOnly());
        return sb.append("}").toString();
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (null -> 0) as the
        // hand-rolled loop it replaces, so hash values are unchanged.
        return java.util.Objects.hash(
                getEngine(),
                getEngineVersion(),
                getCacheParameterGroupFamily(),
                getMaxRecords(),
                getMarker(),
                isDefaultOnly());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof DescribeCacheEngineVersionsRequest)) return false;
        DescribeCacheEngineVersionsRequest other = (DescribeCacheEngineVersionsRequest) obj;
        // Objects.equals matches the original null-xor-then-equals logic per field.
        return java.util.Objects.equals(getEngine(), other.getEngine())
                && java.util.Objects.equals(getEngineVersion(), other.getEngineVersion())
                && java.util.Objects.equals(getCacheParameterGroupFamily(), other.getCacheParameterGroupFamily())
                && java.util.Objects.equals(getMaxRecords(), other.getMaxRecords())
                && java.util.Objects.equals(getMarker(), other.getMarker())
                && java.util.Objects.equals(isDefaultOnly(), other.isDefaultOnly());
    }

    @Override
    public DescribeCacheEngineVersionsRequest clone() {
        return (DescribeCacheEngineVersionsRequest) super.clone();
    }
}
| |
package eu.fbk.fm.alignments;
import com.google.common.base.Stopwatch;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import eu.fbk.fm.alignments.evaluation.Dataset;
import eu.fbk.fm.alignments.evaluation.DatasetEntry;
import eu.fbk.fm.alignments.index.FillFromIndex;
import eu.fbk.fm.alignments.persistence.sparql.Endpoint;
import eu.fbk.fm.alignments.query.QueryAssemblyStrategy;
import eu.fbk.fm.alignments.query.index.AllNamesStrategy;
import eu.fbk.fm.alignments.scorer.*;
import eu.fbk.fm.alignments.scorer.text.LSAVectorProvider;
import eu.fbk.fm.alignments.scorer.text.MemoryEmbeddingsProvider;
import eu.fbk.fm.alignments.scorer.text.VectorProvider;
import eu.fbk.fm.alignments.twitter.TwitterCredentials;
import eu.fbk.fm.alignments.twitter.TwitterDeserializer;
import eu.fbk.fm.alignments.utils.DBUtils;
import eu.fbk.utils.lsa.LSM;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.sql.DataSource;
import java.io.*;
import java.lang.reflect.Type;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.zip.GZIPInputStream;
import static eu.fbk.fm.alignments.PrepareTrainingSet.RESOLVE_CHUNK_SIZE;
import static eu.fbk.fm.alignments.scorer.TextScorer.DBPEDIA_TEXT_EXTRACTOR;
/**
* A sibling to PrepareTrainingSet but tailored towards inference instead of full training+evaluation pipeline
*
* @author Yaroslav Nechaev (remper@me.com)
*/
public class ProcessDataset {
    private static final Logger logger = LoggerFactory.getLogger(ProcessDataset.class);

    /**
     * Pipeline entry point: loads configuration, resolves the input dataset in
     * chunks (resumable via numbered .gz files in the resolved directory),
     * deserializes every chunk, generates features, filters out dead/empty
     * entries, and dumps the surviving entries as newline-separated JSON.
     */
    public static void main(String[] args) throws Exception {
        Gson gson = TwitterDeserializer.getDefault().getBuilder().create();
        // Reuses PrepareTrainingSet's CLI parsing; null means bad/missing arguments.
        PrepareTrainingSet.Configuration configuration = PrepareTrainingSet.loadConfiguration(args);
        if (configuration == null) {
            return;
        }
        // DB credentials are mandatory — the candidate index lives in the database.
        if (configuration.dbConnection == null || configuration.dbUser == null || configuration.dbPassword == null) {
            logger.error("DB credentials are not specified");
            return;
        }
        logger.info(String.format("Options %s", gson.toJson(configuration)));
        QueryAssemblyStrategy qaStrategy = new AllNamesStrategy();
        Endpoint endpoint = new Endpoint(configuration.endpoint);
        PrepareTrainingSet prepareTrainingSet;
        DataSource source = DBUtils.createHikariDataSource(configuration.dbConnection, configuration.dbUser, configuration.dbPassword);
        prepareTrainingSet = new PrepareTrainingSet(new FillFromIndex(endpoint, qaStrategy, source));
        // The LSA model backs the text scorer and is required below.
        if (configuration.lsa == null) {
            logger.info("LSA is not specified. Stopping");
            return;
        }
        PAI18Strategy strategy = new PAI18Strategy(source);
        LSM lsm = new LSM(configuration.lsa + "/X", 100, true);
        VectorProvider textVectorProvider = new LSAVectorProvider(lsm);
        List<VectorProvider> allVectorProviders = new LinkedList<>();
        allVectorProviders.add(textVectorProvider);
        // Optionally add one embedding model per file in the embeddings directory;
        // a model that fails to load is logged and skipped, not fatal.
        if (configuration.embeddings != null) {
            LinkedList<VectorProvider> embProviders = new LinkedList<>();
            Files.list(Paths.get(configuration.embeddings)).forEach((path) -> {
                try {
                    embProviders.add(new MemoryEmbeddingsProvider(path.toString(), configuration.lsa));
                } catch (Exception e) {
                    logger.error("Error while loading embedding", e);
                }
            });
            logger.info("Loaded {} embedding models", embProviders.size());
            allVectorProviders.addAll(embProviders);
        }
        strategy.addProvider(ISWC17Strategy.builder().vectorProviders(allVectorProviders).build());
        prepareTrainingSet.setScoreStrategy(strategy);
        //prepareTrainingSet.setScoreStrategy(new PAI18Strategy(new LinkedList<>()));
        //prepareTrainingSet.setScoreStrategy(new SMTStrategy(source, configuration.lsa));
        FileProvider files = new FileProvider(configuration.workdir);
        if (!files.input.exists()) {
            logger.error("Input dataset doesn't exist");
            return;
        }
        //Loading full input dataset
        Dataset inputDataset = Dataset.fromFile(files.input);
        if (configuration.credentials != null) {
            prepareTrainingSet.setCredentials(TwitterCredentials.credentialsFromFile(new File(configuration.credentials)));
        }
        //Resolving all the data needed for analysis
        // Resume support: resolved chunks are saved as "<index>.gz"; scan for the
        // highest existing index so already-resolved chunks are skipped on restart.
        int curLastIndex = -1;
        int totalChunks = (int) Math.ceil((float) inputDataset.size() / RESOLVE_CHUNK_SIZE);
        if (files.resolved.exists()) {
            File[] resolveChunks = files.resolved.listFiles();
            if (resolveChunks == null) {
                logger.error("Something wrong with the resolve directory");
                return;
            }
            for (File file : resolveChunks) {
                if (!file.getName().endsWith(".gz")) {
                    continue;
                }
                // File name up to the first '.' is the numeric chunk index.
                int index = Integer.valueOf(file.getName().substring(0, file.getName().indexOf('.')));
                if (index > curLastIndex) {
                    curLastIndex = index;
                }
            }
        } else {
            if (!files.resolved.mkdir()) {
                logger.error("Can't create directory for the resolved info");
                return;
            }
        }
        curLastIndex++;
        if (curLastIndex < totalChunks) {
            int toSkip = curLastIndex;
            logger.info(String.format(
                "Resolving all data from the dataset (%d entries, skip chunks: %d, projected chunks: %d)",
                inputDataset.size(), toSkip, totalChunks
            ));
            logger.info("Query strategy: " + prepareTrainingSet.getQaStrategy().getClass().getSimpleName());
            // NOTE(review): the flush condition is "> RESOLVE_CHUNK_SIZE", so each
            // chunk actually holds RESOLVE_CHUNK_SIZE+1 entries while totalChunks is
            // computed as ceil(size / RESOLVE_CHUNK_SIZE) — TODO confirm the
            // off-by-one between the two is intended.
            Dataset curDataset = new Dataset();
            for (DatasetEntry entry : inputDataset) {
                curDataset.add(entry);
                if (curDataset.size() > RESOLVE_CHUNK_SIZE) {
                    if (toSkip == 0) {
                        prepareTrainingSet.resolveAndSaveDatasetChunk(curDataset, curLastIndex, files);
                        curLastIndex++;
                    } else {
                        toSkip--;
                    }
                    curDataset = new Dataset();
                }
            }
            // Flush the final, partially-filled chunk.
            if (curDataset.size() > 0) {
                prepareTrainingSet.resolveAndSaveDatasetChunk(curDataset, curLastIndex, files);
            }
        }
        // Deserializing the entire collection
        final List<FullyResolvedEntry> resolveDataset = new LinkedList<>();
        logger.info("Deserialising user data and generating features");
        List<File> resolveChunks = Arrays.asList(Optional.ofNullable(files.resolved.listFiles()).orElse(new File[0]));
        if (resolveChunks.size() == 0) {
            logger.error("Can't deserialize user data");
            return;
        }
        AtomicInteger chunksLeft = new AtomicInteger(resolveChunks.size());
        AtomicInteger dead = new AtomicInteger(0);
        AtomicInteger empty = new AtomicInteger(0);
        resolveChunks.forEach(file -> {
            Stopwatch watch = Stopwatch.createStarted();
            Reader reader;
            try {
                reader = new InputStreamReader(new GZIPInputStream(new FileInputStream(file)));
            } catch (IOException e) {
                throw new RuntimeException("Can't open file " + file);
            }
            // Each chunk file is a gzipped JSON array of FullyResolvedEntry.
            Type type = new TypeToken<List<FullyResolvedEntry>>() {
            }.getType();
            List<FullyResolvedEntry> entries = gson.fromJson(reader, type);
            logger.info(String.format(
                "Chunk %s deserialized in %.2f seconds",
                file.getName(),
                (double) watch.elapsed(TimeUnit.MILLISECONDS) / 1000
            ));
            watch.reset().start();
            // Drop entries whose resource is flagged dead, counting how many.
            List<FullyResolvedEntry> filteredChunk = entries
                .stream()
                .filter(entry -> !entry.resource.isDead()).collect(Collectors.toList());
            dead.addAndGet(entries.size()-filteredChunk.size());
            // NOTE(review): features are generated/purged on the FULL chunk
            // (entries), not on the dead-filtered list — presumably intentional;
            // confirm before changing.
            prepareTrainingSet.generateFeatures(entries);
            prepareTrainingSet.purgeAdditionalData(entries);
            // Second filter: drop entries with no candidates at all. The "empty"
            // counter is taken against the original size, so it also includes the
            // dead ones; the statistics below subtract dead.get() to compensate.
            filteredChunk = filteredChunk
                .stream()
                .filter(entry -> entry.candidates.size() > 0)
                .collect(Collectors.toList());
            empty.addAndGet(entries.size()-filteredChunk.size());
            // Guarded because forEach may run from a parallel caller; the shared
            // accumulators above are already atomic.
            synchronized (resolveDataset) {
                resolveDataset.addAll(filteredChunk);
            }
            IOUtils.closeQuietly(reader);
            logger.info(String.format(
                "Chunk %s completed in %.2f seconds (left %2d)",
                file.getName(),
                (double) watch.elapsed(TimeUnit.MILLISECONDS) / 1000,
                chunksLeft.decrementAndGet()
            ));
        });
        try {
            int numCandidates = resolveDataset
                .stream()
                .map(entry -> entry.candidates.size())
                .reduce(0, (i1, i2) -> i1+i2);
            logger.info("Dataset statistics: ");
            logger.info(" Items before resolving:\t" + inputDataset.size());
            logger.info(" Items after resolving:\t" + resolveDataset.size());
            int lost = inputDataset.size() - resolveDataset.size();
            logger.info(String.format(
                " Lost:\t%d (%.2f%%, dead: %d, empty: %d)",
                lost, ((double) lost / inputDataset.size()) * 100,
                dead.get(), empty.get()-dead.get()
            ));
            logger.info(String.format(" Average candidates per entity: %.2f", (double) numCandidates / resolveDataset.size()));
            Stopwatch watch = Stopwatch.createStarted();
            logger.info("Dumping full experimental setting to JSON");
            // Output format: one JSON object per line (newline between entries,
            // none after the last).
            FileWriter resolvedWriter = new FileWriter(files.dataset);
            boolean first = true;
            for (FullyResolvedEntry entry : resolveDataset) {
                if (first) {
                    first = false;
                } else {
                    resolvedWriter.write('\n');
                }
                gson.toJson(entry, resolvedWriter);
            }
            IOUtils.closeQuietly(resolvedWriter);
            logger.info(String.format("Complete in %.2f seconds", (double) watch.elapsed(TimeUnit.MILLISECONDS) / 1000));
        } catch (Exception e) {
            logger.error("Error while processing pipeline", e);
            e.printStackTrace();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.internal.cache;
import com.gemstone.gemfire.CancelException;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.server.CacheServer;
import com.gemstone.gemfire.cache.server.ClientSubscriptionConfig;
import com.gemstone.gemfire.cache.server.ServerLoadProbe;
import com.gemstone.gemfire.distributed.DistributedMember;
import com.gemstone.gemfire.distributed.internal.DM;
import com.gemstone.gemfire.distributed.internal.InternalDistributedSystem;
import com.gemstone.gemfire.internal.admin.ClientMembershipMessage;
import com.gemstone.gemfire.internal.cache.xmlcache.CacheCreation;
import com.gemstone.gemfire.management.membership.ClientMembership;
import com.gemstone.gemfire.management.membership.ClientMembershipEvent;
import com.gemstone.gemfire.management.membership.ClientMembershipListener;
import java.io.IOException;
import java.util.Arrays;
import java.util.Set;
/**
* Abstract class that contains common code that all true implementations
* of {@link CacheServer} can use.
*
* @author darrel
* @since 5.7
*/
public abstract class AbstractCacheServer implements CacheServer {

    /** System property tests can set to override {@link CacheServer#DEFAULT_PORT}. */
    public static final String TEST_OVERRIDE_DEFAULT_PORT_PROPERTY = "gemfire.test.CacheServer.OVERRIDE_DEFAULT_PORT";

    /** The cache that is served by this bridge server */
    protected final InternalCache cache;

    /** The port that the bridge server was configured to run on */
    protected int port;

    /** The maximum number of connections that the BridgeServer will accept */
    protected int maxConnections;

    /** The maximum number of threads that the BridgeServer will create */
    protected int maxThreads;

    /** Whether the bridge server notifies by subscription */
    protected boolean notifyBySubscription = true;

    /**
     * The buffer size in bytes of the socket for this
     * <code>BridgeServer</code>
     */
    protected int socketBufferSize;

    /**
     * The tcpNoDelay setting for outgoing sockets
     */
    protected boolean tcpNoDelay;

    /**
     * The maximum amount of time between client pings. This value is used by
     * the <code>ClientHealthMonitor</code> to determine the health of this
     * <code>BridgeServer</code>'s clients.
     */
    protected int maximumTimeBetweenPings;

    /** the maximum number of messages that can be enqueued in a client-queue. */
    protected int maximumMessageCount;

    /**
     * the time (in seconds) after which a message in the client queue will
     * expire.
     */
    protected int messageTimeToLive;

    /**
     * The groups this server belongs to. Use <code>getGroups</code> to read.
     * @since 5.7
     */
    protected String[] groups;

    /** Probe used to compute this server's load for client load balancing. */
    protected ServerLoadProbe loadProbe;

    /**
     * The ip address or host name that this server is to listen on.
     * @since 5.7
     */
    protected String bindAddress;

    /**
     * The ip address or host name that will be given to clients so they can connect
     * to this server
     * @since 5.7
     */
    protected String hostnameForClients;

    /**
     * How frequently to poll the load on this server.
     */
    protected long loadPollInterval;

    protected ClientSubscriptionConfig clientSubscriptionConfig;

    /**
     * Listens to client membership events and notifies any admin
     * members as clients of this server leave/crash.
     * May be null when the server was constructed with {@code attachListener == false}.
     */
    protected final ClientMembershipListener listener;

    /**
     * The number of seconds to keep transaction states for disconnected clients.
     * This allows the client to fail over to another server and still find
     * the transaction state to complete the transaction.
     */
    private int transactionTimeToLive;

    ////////////////////// Constructors //////////////////////

    /**
     * Creates a new <code>BridgeServer</code> with the default
     * configuration.
     *
     * @param cache
     *        The cache being served
     */
    public AbstractCacheServer(InternalCache cache) {
        this(cache, true);
    }

    /**
     * Creates a new <code>BridgeServer</code> with the default configuration.
     *
     * @param cache the cache being served
     * @param attachListener whether to register a {@link ClientMembershipListener}
     *        that forwards client membership changes to admin members
     */
    public AbstractCacheServer(InternalCache cache, boolean attachListener) {
        this.cache = cache;
        this.port = Integer.getInteger(TEST_OVERRIDE_DEFAULT_PORT_PROPERTY, CacheServer.DEFAULT_PORT);
        this.maxConnections = CacheServer.DEFAULT_MAX_CONNECTIONS;
        this.maxThreads = CacheServer.DEFAULT_MAX_THREADS;
        this.socketBufferSize = CacheServer.DEFAULT_SOCKET_BUFFER_SIZE;
        this.tcpNoDelay = CacheServer.DEFAULT_TCP_NO_DELAY;
        this.maximumTimeBetweenPings = CacheServer.DEFAULT_MAXIMUM_TIME_BETWEEN_PINGS;
        this.maximumMessageCount = CacheServer.DEFAULT_MAXIMUM_MESSAGE_COUNT;
        this.messageTimeToLive = CacheServer.DEFAULT_MESSAGE_TIME_TO_LIVE;
        // TODO this should be configurable in CacheServer
        this.transactionTimeToLive = Integer.getInteger("gemfire.cacheServer.transactionTimeToLive", 180);
        this.groups = CacheServer.DEFAULT_GROUPS;
        this.bindAddress = CacheServer.DEFAULT_BIND_ADDRESS;
        this.hostnameForClients = CacheServer.DEFAULT_HOSTNAME_FOR_CLIENTS;
        this.loadProbe = CacheServer.DEFAULT_LOAD_PROBE;
        this.loadPollInterval = CacheServer.DEFAULT_LOAD_POLL_INTERVAL;
        this.clientSubscriptionConfig = new ClientSubscriptionConfigImpl();
        if (!attachListener) {
            this.listener = null;
            return;
        }
        listener = new ClientMembershipListener() {

            @Override
            public void memberJoined(ClientMembershipEvent event) {
                if (event.isClient()) {
                    createAndSendMessage(event, ClientMembershipMessage.JOINED);
                }
            }

            @Override
            public void memberLeft(ClientMembershipEvent event) {
                if (event.isClient()) {
                    createAndSendMessage(event, ClientMembershipMessage.LEFT);
                }
            }

            @Override
            public void memberCrashed(ClientMembershipEvent event) {
                if (event.isClient()) {
                    createAndSendMessage(event, ClientMembershipMessage.CRASHED);
                }
            }

            /**
             * Method to create & send the ClientMembershipMessage to admin members.
             * The message is sent only if there are any admin members in the
             * distribution system.
             *
             * @param event
             *        describes a change in client membership
             * @param type
             *        type of event - one of ClientMembershipMessage.JOINED,
             *        ClientMembershipMessage.LEFT, ClientMembershipMessage.CRASHED
             */
            private void createAndSendMessage(ClientMembershipEvent event, int type) {
                InternalDistributedSystem ds = null;
                Cache cacheInstance = AbstractCacheServer.this.cache;
                if (cacheInstance != null && !(cacheInstance instanceof CacheCreation)) {
                    ds = (InternalDistributedSystem) cacheInstance.getDistributedSystem();
                } else {
                    ds = InternalDistributedSystem.getAnyInstance();
                }
                // ds could be null
                if (ds != null && ds.isConnected()) {
                    DM dm = ds.getDistributionManager();
                    Set adminMemberSet = dm.getAdminMemberSet();
                    /* check if there are any admin members at all */
                    if (!adminMemberSet.isEmpty()) {
                        DistributedMember member = event.getMember();
                        ClientMembershipMessage msg =
                            new ClientMembershipMessage(event.getMemberId(),
                                member == null ? null : member.getHost(),
                                type);
                        msg.setRecipients(adminMemberSet);
                        dm.putOutgoing(msg);
                    }
                }
            }
        };
        ClientMembership.registerClientMembershipListener(listener);
    }

    ///////////////////// Instance Methods /////////////////////

    public int getPort() {
        return this.port;
    }

    public void setPort(int port) {
        this.port = port;
    }

    public String getBindAddress() {
        return this.bindAddress;
    }

    public void setBindAddress(String address) {
        this.bindAddress = address;
    }

    public String getHostnameForClients() {
        return this.hostnameForClients;
    }

    public void setHostnameForClients(String name) {
        this.hostnameForClients = name;
    }

    public int getMaxConnections() {
        return this.maxConnections;
    }

    public void setMaxConnections(int maxCon) {
        this.maxConnections = maxCon;
    }

    public int getMaxThreads() {
        return this.maxThreads;
    }

    public void setMaxThreads(int maxThreads) {
        this.maxThreads = maxThreads;
    }

    public void start() throws IOException {
        // This method is invoked during testing, but it is not necessary
        // to do anything.
    }

    public void setNotifyBySubscription(boolean b) {
        // intentionally a no-op; notify-by-subscription is always on
        //this.notifyBySubscription = true;
    }

    public boolean getNotifyBySubscription() {
        return this.notifyBySubscription;
    }

    public void setSocketBufferSize(int socketBufferSize) {
        this.socketBufferSize = socketBufferSize;
    }

    public int getSocketBufferSize() {
        return this.socketBufferSize;
    }

    public void setMaximumTimeBetweenPings(int maximumTimeBetweenPings) {
        this.maximumTimeBetweenPings = maximumTimeBetweenPings;
    }

    public int getMaximumTimeBetweenPings() {
        return this.maximumTimeBetweenPings;
    }

    public int getMaximumMessageCount() {
        return this.maximumMessageCount;
    }

    public void setMaximumMessageCount(int maximumMessageCount) {
        this.maximumMessageCount = maximumMessageCount;
    }

    public void setTransactionTimeToLive(int seconds) {
        this.transactionTimeToLive = seconds;
    }

    public int getTransactionTimeToLive() {
        return this.transactionTimeToLive;
    }

    public int getMessageTimeToLive() {
        return this.messageTimeToLive;
    }

    public void setMessageTimeToLive(int messageTimeToLive) {
        this.messageTimeToLive = messageTimeToLive;
    }

    /**
     * Sets the groups this server belongs to. A null or empty array resets
     * the server to {@link CacheServer#DEFAULT_GROUPS}; otherwise the array
     * is copied for isolation so later caller mutations cannot leak in.
     */
    public void setGroups(String[] groups) {
        if (groups == null || groups.length == 0) {
            this.groups = CacheServer.DEFAULT_GROUPS;
        } else {
            // copy it for isolation (Arrays.copyOf over manual arraycopy)
            this.groups = Arrays.copyOf(groups, groups.length);
        }
    }

    /**
     * Returns the groups this server belongs to. A defensive copy is returned
     * so callers cannot mutate the internal array.
     */
    public String[] getGroups() {
        String[] result = this.groups;
        if (result.length > 0) {
            // copy it for isolation
            result = Arrays.copyOf(result, result.length);
        }
        return result;
    }

    public ServerLoadProbe getLoadProbe() {
        return loadProbe;
    }

    public void setLoadProbe(ServerLoadProbe loadProbe) {
        this.loadProbe = loadProbe;
    }

    public long getLoadPollInterval() {
        return loadPollInterval;
    }

    public void setLoadPollInterval(long loadPollInterval) {
        this.loadPollInterval = loadPollInterval;
    }

    public void setTcpNoDelay(boolean setting) {
        this.tcpNoDelay = setting;
    }

    public boolean getTcpNoDelay() {
        return this.tcpNoDelay;
    }

    public Cache getCache() {
        return this.cache;
    }

    /** Null-safe string equality (equivalent to {@code Objects.equals} for Strings). */
    private static boolean eq(String s1, String s2) {
        return s1 == null ? s2 == null : s1.equals(s2);
    }

    /**
     * Returns whether or not this bridge server has the same
     * configuration as another bridge server.
     */
    public boolean sameAs(CacheServer other) {
        return getPort() == other.getPort()
            && eq(getBindAddress(), other.getBindAddress())
            && getSocketBufferSize() == other.getSocketBufferSize()
            && getMaximumTimeBetweenPings() == other.getMaximumTimeBetweenPings()
            && getNotifyBySubscription() == other.getNotifyBySubscription()
            && getMaxConnections() == other.getMaxConnections()
            && getMaxThreads() == other.getMaxThreads()
            && getMaximumMessageCount() == other.getMaximumMessageCount()
            && getMessageTimeToLive() == other.getMessageTimeToLive()
            && Arrays.equals(getGroups(), other.getGroups())
            && getLoadProbe().equals(other.getLoadProbe())
            && getLoadPollInterval() == other.getLoadPollInterval()
            && getTcpNoDelay() == other.getTcpNoDelay();
    }
}
| |
/*
* Copyright (c) Joaquim Ley 2016. All Rights Reserved.
* <p/>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.joaquimley.avenging.ui.list;
import android.support.annotation.IntDef;
import android.support.annotation.Nullable;
import android.support.v7.widget.AppCompatImageView;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.joaquimley.avenging.R;
import com.joaquimley.core.data.model.CharacterMarvel;
import com.squareup.picasso.Picasso;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList;
import java.util.List;
/**
* {@link RecyclerView.Adapter} populated with {@link CharacterMarvel}
* makes the call to the {@link ListAdapter.InteractionListener}.
*/
public class ListAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {

    public static final String TAG = ListAdapter.class.getSimpleName();

    private InteractionListener mListInteractionListener;
    private final List<CharacterMarvel> mCharacterList;

    /**
     * ViewTypes serve as a mapping point to which layout should be inflated
     */
    public static final int VIEW_TYPE_GALLERY = 0;
    public static final int VIEW_TYPE_LIST = 1;
    public static final int VIEW_TYPE_LOADING = 2;

    @IntDef({VIEW_TYPE_LOADING, VIEW_TYPE_GALLERY, VIEW_TYPE_LIST})
    @Retention(RetentionPolicy.SOURCE)
    public @interface ViewType {
    }

    @ViewType
    private int mViewType;

    public ListAdapter() {
        mCharacterList = new ArrayList<>();
        mViewType = VIEW_TYPE_GALLERY;
        mListInteractionListener = null;
    }

    @Override
    public int getItemViewType(int position) {
        // A null element is the "loading" placeholder appended by addLoadingView()
        return mCharacterList.get(position) == null ? VIEW_TYPE_LOADING : mViewType;
    }

    @Override
    public int getItemCount() {
        return mCharacterList.size();
    }

    @Override
    public long getItemId(int position) {
        // Fix: the original check (size() >= position) let position == size() through,
        // throwing IndexOutOfBoundsException; also guard the null loading placeholder,
        // which has no id. -1 matches RecyclerView.NO_ID.
        if (position < 0 || position >= mCharacterList.size()) {
            return -1;
        }
        CharacterMarvel item = mCharacterList.get(position);
        return item != null ? item.getId() : -1;
    }

    @Override
    public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        if (viewType == VIEW_TYPE_LOADING) {
            return onIndicationViewHolder(parent);
        }
        return onGenericItemViewHolder(parent, viewType);
    }

    @Override
    public void onBindViewHolder(RecyclerView.ViewHolder holder, final int position) {
        if (holder.getItemViewType() == VIEW_TYPE_LOADING) {
            return; // no-op: the progress bar needs no binding
        }
        onBindGenericItemViewHolder((CharacterViewHolder) holder, position);
    }

    /** Inflates the indeterminate progress row shown while the next page loads. */
    private RecyclerView.ViewHolder onIndicationViewHolder(ViewGroup parent) {
        View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_progress_bar, parent, false);
        return new ProgressBarViewHolder(view);
    }

    /** Inflates either the gallery or the list row, depending on the current view type. */
    private RecyclerView.ViewHolder onGenericItemViewHolder(ViewGroup parent, int viewType) {
        View view = null;
        switch (viewType) {
            case VIEW_TYPE_GALLERY:
                view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_character_gallery, parent, false);
                break;
            case VIEW_TYPE_LIST:
                view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_character_list, parent, false);
                break;
        }
        return new CharacterViewHolder(view);
    }

    /** Binds a character's name and (when present) its image into the row. */
    private void onBindGenericItemViewHolder(final CharacterViewHolder holder, int position) {
        holder.name.setText(mCharacterList.get(position).getName());
        String characterImageUrl = mCharacterList.get(position).getImageUrl();
        if (!TextUtils.isEmpty(characterImageUrl)) {
            Picasso.with(holder.listItem.getContext())
                    .load(characterImageUrl)
                    .centerCrop()
                    .fit()
                    .into(holder.image);
        }
    }

    /** Appends {@code item} to the end of the list. */
    public void add(CharacterMarvel item) {
        add(null, item);
    }

    /**
     * Inserts {@code item} at {@code position}, or appends it when
     * {@code position} is null, notifying the adapter either way.
     */
    public void add(@Nullable Integer position, CharacterMarvel item) {
        if (position != null) {
            mCharacterList.add(position, item);
            notifyItemInserted(position);
        } else {
            mCharacterList.add(item);
            notifyItemInserted(mCharacterList.size() - 1);
        }
    }

    /** Appends a page of items, notifying only the inserted range. */
    public void addItems(List<CharacterMarvel> itemsList) {
        int insertStart = mCharacterList.size();
        mCharacterList.addAll(itemsList);
        // Fix: notify from the first inserted position for itemsList.size() items.
        // The original passed the post-insert count as the start position and an
        // off-by-one item count, producing wrong change animations.
        notifyItemRangeInserted(insertStart, itemsList.size());
    }

    /** Removes the item at {@code position} if it exists; logs and no-ops otherwise. */
    public void remove(int position) {
        // Fix: the original check (size() < position) let position == size() through,
        // throwing IndexOutOfBoundsException, and did not reject negatives.
        if (position < 0 || position >= mCharacterList.size()) {
            Log.w(TAG, "The item at position: " + position + " doesn't exist");
            return;
        }
        mCharacterList.remove(position);
        notifyItemRemoved(position);
    }

    /** Clears every item and refreshes the adapter. */
    public void removeAll() {
        mCharacterList.clear();
        notifyDataSetChanged();
    }

    /**
     * Appends a loading placeholder (a null element) unless one is already last.
     *
     * @return true if the placeholder was added
     */
    public boolean addLoadingView() {
        // Fix: guard the empty list — the original called getItemViewType(-1),
        // which throws IndexOutOfBoundsException.
        if (mCharacterList.isEmpty()
                || getItemViewType(mCharacterList.size() - 1) != VIEW_TYPE_LOADING) {
            add(null);
            return true;
        }
        return false;
    }

    /**
     * Removes the trailing loading placeholder, if present.
     *
     * @return true if the placeholder was removed
     */
    public boolean removeLoadingView() {
        // NOTE(review): requires more than one item, so a list containing only the
        // placeholder is left untouched — preserved from the original; confirm intent.
        if (mCharacterList.size() > 1) {
            int loadingViewPosition = mCharacterList.size() - 1;
            if (getItemViewType(loadingViewPosition) == VIEW_TYPE_LOADING) {
                remove(loadingViewPosition);
                return true;
            }
        }
        return false;
    }

    public boolean isEmpty() {
        return getItemCount() == 0;
    }

    public int getViewType() {
        return mViewType;
    }

    public void setViewType(@ViewType int viewType) {
        mViewType = viewType;
    }

    /**
     * ViewHolders
     */
    public class ProgressBarViewHolder extends RecyclerView.ViewHolder {
        public final ProgressBar progressBar;

        public ProgressBarViewHolder(View view) {
            super(view);
            progressBar = (ProgressBar) view.findViewById(R.id.progress_bar);
        }
    }

    public class CharacterViewHolder extends RecyclerView.ViewHolder {
        public final View listItem;
        public final TextView name;
        public final AppCompatImageView image;

        public CharacterViewHolder(View view) {
            super(view);
            name = (TextView) view.findViewById(R.id.name);
            image = (AppCompatImageView) view.findViewById(R.id.image);
            listItem = view;
            listItem.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if (mListInteractionListener != null) {
                        mListInteractionListener.onListClick(mCharacterList.get(getAdapterPosition()),
                                image, getAdapterPosition());
                    }
                }
            });
        }

        @Override
        public String toString() {
            return super.toString() + " '" + name.getText() + "'";
        }
    }

    /**
     * Interface for handling list interactions
     */
    public interface InteractionListener {
        void onListClick(CharacterMarvel character, View sharedElementView, int adapterPosition);
    }

    public void setListInteractionListener(InteractionListener listInteractionListener) {
        mListInteractionListener = listInteractionListener;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.cluster.coordination.CoordinationMetadata;
import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfigExclusion;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.Index;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
public class MetadataTests extends ESTestCase {
public void testFindAliases() {
    // One index carrying two aliases; each sub-case probes a different lookup shape.
    Metadata metadata = Metadata.builder().put(IndexMetadata.builder("index")
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(AliasMetadata.builder("alias1").build())
        .putAlias(AliasMetadata.builder("alias2").build())).build();
    {
        // No concrete indices -> nothing is returned.
        ImmutableOpenMap<String, List<AliasMetadata>> aliases = metadata.findAliases(new GetAliasesRequest(), Strings.EMPTY_ARRAY);
        assertThat(aliases.size(), equalTo(0));
    }
    {
        final GetAliasesRequest request;
        if (randomBoolean()) {
            request = new GetAliasesRequest();
        } else {
            request = new GetAliasesRequest(randomFrom("alias1", "alias2"));
            // replacing with empty aliases behaves as if aliases were unspecified at request building
            request.replaceAliases(Strings.EMPTY_ARRAY);
        }
        // Fix: pass the request built above. The original passed a fresh
        // GetAliasesRequest here, so the replaceAliases branch was never exercised.
        ImmutableOpenMap<String, List<AliasMetadata>> aliases = metadata.findAliases(request, new String[]{"index"});
        assertThat(aliases.size(), equalTo(1));
        List<AliasMetadata> aliasMetadataList = aliases.get("index");
        assertThat(aliasMetadataList.size(), equalTo(2));
        assertThat(aliasMetadataList.get(0).alias(), equalTo("alias1"));
        assertThat(aliasMetadataList.get(1).alias(), equalTo("alias2"));
    }
    {
        // Wildcard alias pattern matches both aliases.
        ImmutableOpenMap<String, List<AliasMetadata>> aliases =
            metadata.findAliases(new GetAliasesRequest("alias*"), new String[]{"index"});
        assertThat(aliases.size(), equalTo(1));
        List<AliasMetadata> aliasMetadataList = aliases.get("index");
        assertThat(aliasMetadataList.size(), equalTo(2));
        assertThat(aliasMetadataList.get(0).alias(), equalTo("alias1"));
        assertThat(aliasMetadataList.get(1).alias(), equalTo("alias2"));
    }
    {
        // Exact alias name matches only that alias.
        ImmutableOpenMap<String, List<AliasMetadata>> aliases =
            metadata.findAliases(new GetAliasesRequest("alias1"), new String[]{"index"});
        assertThat(aliases.size(), equalTo(1));
        List<AliasMetadata> aliasMetadataList = aliases.get("index");
        assertThat(aliasMetadataList.size(), equalTo(1));
        assertThat(aliasMetadataList.get(0).alias(), equalTo("alias1"));
    }
    {
        // findAllAliases ignores alias patterns and returns everything for the index.
        ImmutableOpenMap<String, List<AliasMetadata>> aliases = metadata.findAllAliases(new String[]{"index"});
        assertThat(aliases.size(), equalTo(1));
        List<AliasMetadata> aliasMetadataList = aliases.get("index");
        assertThat(aliasMetadataList.size(), equalTo(2));
        assertThat(aliasMetadataList.get(0).alias(), equalTo("alias1"));
        assertThat(aliasMetadataList.get(1).alias(), equalTo("alias2"));
    }
    {
        ImmutableOpenMap<String, List<AliasMetadata>> aliases = metadata.findAllAliases(Strings.EMPTY_ARRAY);
        assertThat(aliases.size(), equalTo(0));
    }
}
public void testFindAliasWithExclusion() {
    // One index carrying two aliases.
    IndexMetadata.Builder indexBuilder = IndexMetadata.builder("index")
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(AliasMetadata.builder("alias1").build())
        .putAlias(AliasMetadata.builder("alias2").build());
    Metadata metadata = Metadata.builder().put(indexBuilder).build();
    // "*" selects both aliases; "-alias1" then excludes the first one.
    GetAliasesRequest request = new GetAliasesRequest().aliases("*", "-alias1");
    List<AliasMetadata> aliases = metadata.findAliases(request, new String[] {"index"}).get("index");
    assertThat(aliases.size(), equalTo(1));
    assertThat(aliases.get(0).alias(), equalTo("alias2"));
}
public void testFindAliasWithExclusionAndOverride() {
    // One index carrying three aliases: aa, ab, bb.
    IndexMetadata.Builder indexBuilder = IndexMetadata.builder("index")
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(AliasMetadata.builder("aa").build())
        .putAlias(AliasMetadata.builder("ab").build())
        .putAlias(AliasMetadata.builder("bb").build());
    Metadata metadata = Metadata.builder().put(indexBuilder).build();
    // "a*" selects aa+ab, "-*b" drops ab (and would drop bb), then "b*" re-adds bb.
    GetAliasesRequest request = new GetAliasesRequest().aliases("a*", "-*b", "b*");
    List<AliasMetadata> aliases = metadata.findAliases(request, new String[] {"index"}).get("index");
    assertThat(aliases.size(), equalTo(2));
    assertThat(aliases.get(0).alias(), equalTo("aa"));
    assertThat(aliases.get(1).alias(), equalTo("bb"));
}
public void testIndexAndAliasWithSameName() {
    // An index must not carry an alias with its own name.
    IndexMetadata.Builder builder = IndexMetadata.builder("index")
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(AliasMetadata.builder("index").build());
    // Consistency: use expectThrows like the other tests in this class instead of
    // the try/fail/catch idiom.
    IllegalStateException e = expectThrows(IllegalStateException.class,
        () -> Metadata.builder().put(builder).build());
    assertThat(e.getMessage(),
        equalTo("index and alias names need to be unique, but the following duplicates were found [index (alias of [index])]"));
}
public void testAliasCollidingWithAnExistingIndex() {
    // Random index names, a few of which are reused verbatim as alias names —
    // guaranteeing at least one alias/index name collision.
    int indexCount = randomIntBetween(10, 100);
    Set<String> indices = new HashSet<>(indexCount);
    for (int i = 0; i < indexCount; i++) {
        indices.add(randomAlphaOfLength(10));
    }
    Map<String, Set<String>> aliasToIndices = new HashMap<>();
    for (String alias: randomSubsetOf(randomIntBetween(1, 10), indices)) {
        aliasToIndices.put(alias, new HashSet<>(randomSubsetOf(randomIntBetween(1, 3), indices)));
    }
    // Also mix in some aliases that do NOT collide.
    int properAliases = randomIntBetween(0, 3);
    for (int i = 0; i < properAliases; i++) {
        aliasToIndices.put(randomAlphaOfLength(5), new HashSet<>(randomSubsetOf(randomIntBetween(1, 3), indices)));
    }
    Metadata.Builder metadataBuilder = Metadata.builder();
    for (String index : indices) {
        IndexMetadata.Builder indexBuilder = IndexMetadata.builder(index)
            .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
            .numberOfShards(1)
            .numberOfReplicas(0);
        aliasToIndices.forEach((key, value) -> {
            if (value.contains(index)) {
                indexBuilder.putAlias(AliasMetadata.builder(key).build());
            }
        });
        metadataBuilder.put(indexBuilder);
    }
    // Consistency: use expectThrows like the other tests in this class instead of
    // the try/fail/catch idiom.
    IllegalStateException e = expectThrows(IllegalStateException.class, metadataBuilder::build);
    assertThat(e.getMessage(), startsWith("index and alias names need to be unique"));
}
public void testValidateAliasWriteOnly() {
    String alias = randomAlphaOfLength(5);
    String indexA = randomAlphaOfLength(6);
    String indexB = randomAlphaOfLength(7);
    Boolean aWriteIndex = randomBoolean() ? null : randomBoolean();
    // Keep the pair valid: at most one of the two flags may be TRUE.
    Boolean bWriteIndex = Boolean.TRUE.equals(aWriteIndex)
        ? randomFrom(Boolean.FALSE, null)
        : randomFrom(Boolean.TRUE, Boolean.FALSE, null);
    // A single index/alias pair always validates.
    Metadata metadata = Metadata.builder().put(buildIndexMetadata(indexA, alias, aWriteIndex)).build();
    // Two indices behind the alias still validate for any combination with at most
    // one write index: (null, null), (null, true), (null, false), (false, false).
    Metadata.builder(metadata).put(buildIndexMetadata(indexB, alias, bWriteIndex)).build();
    // Two write indices on one alias must be rejected.
    Exception exception = expectThrows(IllegalStateException.class,
        () -> {
            IndexMetadata.Builder metaA = buildIndexMetadata(indexA, alias, true);
            IndexMetadata.Builder metaB = buildIndexMetadata(indexB, alias, true);
            Metadata.builder().put(metaA).put(metaB).build();
        });
    assertThat(exception.getMessage(), startsWith("alias [" + alias + "] has more than one write index ["));
}
public void testValidateHiddenAliasConsistency() {
    String alias = randomAlphaOfLength(5);
    String indexA = randomAlphaOfLength(6);
    String indexB = randomAlphaOfLength(7);
    // Hidden on A but not on B: inconsistent is_hidden flags must be rejected.
    Exception hiddenOnFirst = expectThrows(IllegalStateException.class,
        () -> buildMetadataWithHiddenIndexMix(alias, indexA, true, indexB, randomFrom(false, null)).build());
    assertThat(hiddenOnFirst.getMessage(), containsString("has is_hidden set to true on indices"));
    // Mirror case: hidden on B only.
    Exception hiddenOnSecond = expectThrows(IllegalStateException.class,
        () -> buildMetadataWithHiddenIndexMix(alias, indexA, randomFrom(false, null), indexB, true).build());
    assertThat(hiddenOnSecond.getMessage(), containsString("has is_hidden set to true on indices"));
}
/**
 * Builds metadata holding two single-shard indices that share {@code aliasName},
 * with each index's copy of the alias carrying its own is_hidden flag.
 */
private Metadata.Builder buildMetadataWithHiddenIndexMix(String aliasName, String indexAName, Boolean indexAHidden,
                                                         String indexBName, Boolean indexBHidden) {
    return Metadata.builder()
        .put(indexWithHiddenAlias(indexAName, aliasName, indexAHidden))
        .put(indexWithHiddenAlias(indexBName, aliasName, indexBHidden));
}

/** One-shard, zero-replica index carrying {@code aliasName} with the given is_hidden flag. */
private IndexMetadata.Builder indexWithHiddenAlias(String indexName, String aliasName, Boolean hidden) {
    return IndexMetadata.builder(indexName)
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(AliasMetadata.builder(aliasName).isHidden(hidden).build());
}
public void testResolveIndexRouting() {
    // alias0 has no routing, alias1 a single routing value, alias2 a multi-value routing.
    IndexMetadata.Builder builder = IndexMetadata.builder("index")
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(AliasMetadata.builder("alias0").build())
        .putAlias(AliasMetadata.builder("alias1").routing("1").build())
        .putAlias(AliasMetadata.builder("alias2").routing("1,2").build());
    Metadata metadata = Metadata.builder().put(builder).build();
    // no alias, no index
    assertEquals(metadata.resolveIndexRouting(null, null), null);
    assertEquals(metadata.resolveIndexRouting("0", null), "0");
    // index, no alias
    assertEquals(metadata.resolveIndexRouting(null, "index"), null);
    assertEquals(metadata.resolveIndexRouting("0", "index"), "0");
    // alias with no index routing
    assertEquals(metadata.resolveIndexRouting(null, "alias0"), null);
    assertEquals(metadata.resolveIndexRouting("0", "alias0"), "0");
    // alias with index routing.
    assertEquals(metadata.resolveIndexRouting(null, "alias1"), "1");
    // Consistency: use expectThrows like the other tests in this class instead of
    // the try/fail/catch idiom.
    IllegalArgumentException conflicting = expectThrows(IllegalArgumentException.class,
        () -> metadata.resolveIndexRouting("0", "alias1"));
    assertThat(conflicting.getMessage(), is("Alias [alias1] has index routing associated with it [1], " +
        "and was provided with routing value [0], rejecting operation"));
    // alias with invalid (multi-value) index routing.
    IllegalArgumentException multiValued = expectThrows(IllegalArgumentException.class,
        () -> metadata.resolveIndexRouting(null, "alias2"));
    assertThat(multiValued.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that" +
        " resolved to several routing values, rejecting operation"));
    IllegalArgumentException multiValuedWithRouting = expectThrows(IllegalArgumentException.class,
        () -> metadata.resolveIndexRouting("1", "alias2"));
    assertThat(multiValuedWithRouting.getMessage(), is("index/alias [alias2] provided with routing value [1,2] that" +
        " resolved to several routing values, rejecting operation"));
    IndexMetadata.Builder builder2 = IndexMetadata.builder("index2")
        .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
        .numberOfShards(1)
        .numberOfReplicas(0)
        .putAlias(AliasMetadata.builder("alias0").build());
    Metadata metadataTwoIndices = Metadata.builder(metadata).put(builder2).build();
    // alias with multiple indices cannot resolve routing
    IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
        () -> metadataTwoIndices.resolveIndexRouting("1", "alias0"));
    assertThat(exception.getMessage(), startsWith("Alias [alias0] has more than one index associated with it"));
}
public void testResolveWriteIndexRouting() {
AliasMetadata.Builder aliasZeroBuilder = AliasMetadata.builder("alias0");
if (randomBoolean()) {
aliasZeroBuilder.writeIndex(true);
}
IndexMetadata.Builder builder = IndexMetadata.builder("index")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(aliasZeroBuilder.build())
.putAlias(AliasMetadata.builder("alias1").routing("1").build())
.putAlias(AliasMetadata.builder("alias2").routing("1,2").build())
.putAlias(AliasMetadata.builder("alias3").writeIndex(false).build())
.putAlias(AliasMetadata.builder("alias4").routing("1,2").writeIndex(true).build());
Metadata metadata = Metadata.builder().put(builder).build();
// no alias, no index
assertEquals(metadata.resolveWriteIndexRouting(null, null), null);
assertEquals(metadata.resolveWriteIndexRouting("0", null), "0");
// index, no alias
assertEquals(metadata.resolveWriteIndexRouting(null, "index"), null);
assertEquals(metadata.resolveWriteIndexRouting("0", "index"), "0");
// alias with no index routing
assertEquals(metadata.resolveWriteIndexRouting(null, "alias0"), null);
assertEquals(metadata.resolveWriteIndexRouting("0", "alias0"), "0");
// alias with index routing.
assertEquals(metadata.resolveWriteIndexRouting(null, "alias1"), "1");
Exception exception = expectThrows(IllegalArgumentException.class, () -> metadata.resolveWriteIndexRouting("0", "alias1"));
assertThat(exception.getMessage(),
is("Alias [alias1] has index routing associated with it [1], and was provided with routing value [0], rejecting operation"));
// alias with invalid index routing.
exception = expectThrows(IllegalArgumentException.class, () -> metadata.resolveWriteIndexRouting(null, "alias2"));
assertThat(exception.getMessage(),
is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation"));
exception = expectThrows(IllegalArgumentException.class, () -> metadata.resolveWriteIndexRouting("1", "alias2"));
assertThat(exception.getMessage(),
is("index/alias [alias2] provided with routing value [1,2] that resolved to several routing values, rejecting operation"));
exception = expectThrows(IllegalArgumentException.class, () -> metadata.resolveWriteIndexRouting(randomFrom("1", null), "alias4"));
assertThat(exception.getMessage(),
is("index/alias [alias4] provided with routing value [1,2] that resolved to several routing values, rejecting operation"));
// alias with no write index
exception = expectThrows(IllegalArgumentException.class, () -> metadata.resolveWriteIndexRouting("1", "alias3"));
assertThat(exception.getMessage(),
is("alias [alias3] does not have a write index"));
// aliases with multiple indices
AliasMetadata.Builder aliasZeroBuilderTwo = AliasMetadata.builder("alias0");
if (randomBoolean()) {
aliasZeroBuilder.writeIndex(false);
}
IndexMetadata.Builder builder2 = IndexMetadata.builder("index2")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(0)
.putAlias(aliasZeroBuilderTwo.build())
.putAlias(AliasMetadata.builder("alias1").routing("0").writeIndex(true).build())
.putAlias(AliasMetadata.builder("alias2").writeIndex(true).build());
Metadata metadataTwoIndices = Metadata.builder(metadata).put(builder2).build();
// verify that new write index is used
assertThat("0", equalTo(metadataTwoIndices.resolveWriteIndexRouting("0", "alias1")));
}
public void testUnknownFieldClusterMetadata() throws IOException {
BytesReference metadata = BytesReference.bytes(JsonXContent.contentBuilder()
.startObject()
.startObject("meta-data")
.field("random", "value")
.endObject()
.endObject());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, metadata)) {
Metadata.Builder.fromXContent(parser);
fail();
} catch (IllegalArgumentException e) {
assertEquals("Unexpected field [random]", e.getMessage());
}
}
public void testUnknownFieldIndexMetadata() throws IOException {
BytesReference metadata = BytesReference.bytes(JsonXContent.contentBuilder()
.startObject()
.startObject("index_name")
.field("random", "value")
.endObject()
.endObject());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, metadata)) {
IndexMetadata.Builder.fromXContent(parser);
fail();
} catch (IllegalArgumentException e) {
assertEquals("Unexpected field [random]", e.getMessage());
}
}
public void testMetadataGlobalStateChangesOnIndexDeletions() {
IndexGraveyard.Builder builder = IndexGraveyard.builder();
builder.addTombstone(new Index("idx1", UUIDs.randomBase64UUID()));
final Metadata metadata1 = Metadata.builder().indexGraveyard(builder.build()).build();
builder = IndexGraveyard.builder(metadata1.indexGraveyard());
builder.addTombstone(new Index("idx2", UUIDs.randomBase64UUID()));
final Metadata metadata2 = Metadata.builder(metadata1).indexGraveyard(builder.build()).build();
assertFalse("metadata not equal after adding index deletions", Metadata.isGlobalStateEquals(metadata1, metadata2));
final Metadata metadata3 = Metadata.builder(metadata2).build();
assertTrue("metadata equal when not adding index deletions", Metadata.isGlobalStateEquals(metadata2, metadata3));
}
public void testXContentWithIndexGraveyard() throws IOException {
final IndexGraveyard graveyard = IndexGraveyardTests.createRandom();
final Metadata originalMeta = Metadata.builder().indexGraveyard(graveyard).build();
final XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject();
Metadata.FORMAT.toXContent(builder, originalMeta);
builder.endObject();
try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
final Metadata fromXContentMeta = Metadata.fromXContent(parser);
assertThat(fromXContentMeta.indexGraveyard(), equalTo(originalMeta.indexGraveyard()));
}
}
public void testXContentClusterUUID() throws IOException {
final Metadata originalMeta = Metadata.builder().clusterUUID(UUIDs.randomBase64UUID())
.clusterUUIDCommitted(randomBoolean()).build();
final XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject();
Metadata.FORMAT.toXContent(builder, originalMeta);
builder.endObject();
try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
final Metadata fromXContentMeta = Metadata.fromXContent(parser);
assertThat(fromXContentMeta.clusterUUID(), equalTo(originalMeta.clusterUUID()));
assertThat(fromXContentMeta.clusterUUIDCommitted(), equalTo(originalMeta.clusterUUIDCommitted()));
}
}
public void testSerializationClusterUUID() throws IOException {
final Metadata originalMeta = Metadata.builder().clusterUUID(UUIDs.randomBase64UUID())
.clusterUUIDCommitted(randomBoolean()).build();
final BytesStreamOutput out = new BytesStreamOutput();
originalMeta.writeTo(out);
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(ClusterModule.getNamedWriteables());
final Metadata fromStreamMeta = Metadata.readFrom(
new NamedWriteableAwareStreamInput(out.bytes().streamInput(), namedWriteableRegistry)
);
assertThat(fromStreamMeta.clusterUUID(), equalTo(originalMeta.clusterUUID()));
assertThat(fromStreamMeta.clusterUUIDCommitted(), equalTo(originalMeta.clusterUUIDCommitted()));
}
public void testMetadataGlobalStateChangesOnClusterUUIDChanges() {
final Metadata metadata1 = Metadata.builder().clusterUUID(UUIDs.randomBase64UUID()).clusterUUIDCommitted(randomBoolean()).build();
final Metadata metadata2 = Metadata.builder(metadata1).clusterUUID(UUIDs.randomBase64UUID()).build();
final Metadata metadata3 = Metadata.builder(metadata1).clusterUUIDCommitted(!metadata1.clusterUUIDCommitted()).build();
assertFalse(Metadata.isGlobalStateEquals(metadata1, metadata2));
assertFalse(Metadata.isGlobalStateEquals(metadata1, metadata3));
final Metadata metadata4 = Metadata.builder(metadata2).clusterUUID(metadata1.clusterUUID()).build();
assertTrue(Metadata.isGlobalStateEquals(metadata1, metadata4));
}
private static CoordinationMetadata.VotingConfiguration randomVotingConfig() {
return new CoordinationMetadata.VotingConfiguration(Sets.newHashSet(generateRandomStringArray(randomInt(10), 20, false)));
}
private Set<VotingConfigExclusion> randomVotingConfigExclusions() {
final int size = randomIntBetween(0, 10);
final Set<VotingConfigExclusion> nodes = new HashSet<>(size);
while (nodes.size() < size) {
assertTrue(nodes.add(new VotingConfigExclusion(randomAlphaOfLength(10), randomAlphaOfLength(10))));
}
return nodes;
}
public void testXContentWithCoordinationMetadata() throws IOException {
CoordinationMetadata originalMeta = new CoordinationMetadata(randomNonNegativeLong(), randomVotingConfig(), randomVotingConfig(),
randomVotingConfigExclusions());
Metadata metadata = Metadata.builder().coordinationMetadata(originalMeta).build();
final XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject();
Metadata.FORMAT.toXContent(builder, metadata);
builder.endObject();
try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
final CoordinationMetadata fromXContentMeta = Metadata.fromXContent(parser).coordinationMetadata();
assertThat(fromXContentMeta, equalTo(originalMeta));
}
}
public void testGlobalStateEqualsCoordinationMetadata() {
CoordinationMetadata coordinationMetadata1 = new CoordinationMetadata(randomNonNegativeLong(), randomVotingConfig(),
randomVotingConfig(), randomVotingConfigExclusions());
Metadata metadata1 = Metadata.builder().coordinationMetadata(coordinationMetadata1).build();
CoordinationMetadata coordinationMetadata2 = new CoordinationMetadata(randomNonNegativeLong(), randomVotingConfig(),
randomVotingConfig(), randomVotingConfigExclusions());
Metadata metadata2 = Metadata.builder().coordinationMetadata(coordinationMetadata2).build();
assertTrue(Metadata.isGlobalStateEquals(metadata1, metadata1));
assertFalse(Metadata.isGlobalStateEquals(metadata1, metadata2));
}
public void testSerializationWithIndexGraveyard() throws IOException {
final IndexGraveyard graveyard = IndexGraveyardTests.createRandom();
final Metadata originalMeta = Metadata.builder().indexGraveyard(graveyard).build();
final BytesStreamOutput out = new BytesStreamOutput();
originalMeta.writeTo(out);
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(ClusterModule.getNamedWriteables());
final Metadata fromStreamMeta = Metadata.readFrom(
new NamedWriteableAwareStreamInput(out.bytes().streamInput(), namedWriteableRegistry)
);
assertThat(fromStreamMeta.indexGraveyard(), equalTo(fromStreamMeta.indexGraveyard()));
}
public void testFindMappings() throws IOException {
Metadata metadata = Metadata.builder()
.put(IndexMetadata.builder("index1")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0))
.putMapping(FIND_MAPPINGS_TEST_ITEM))
.put(IndexMetadata.builder("index2")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0))
.putMapping(FIND_MAPPINGS_TEST_ITEM)).build();
{
ImmutableOpenMap<String, MappingMetadata> mappings = metadata.findMappings(Strings.EMPTY_ARRAY,
MapperPlugin.NOOP_FIELD_FILTER);
assertEquals(0, mappings.size());
}
{
ImmutableOpenMap<String, MappingMetadata> mappings = metadata.findMappings(new String[]{"index1"},
MapperPlugin.NOOP_FIELD_FILTER);
assertEquals(1, mappings.size());
assertIndexMappingsNotFiltered(mappings, "index1");
}
{
ImmutableOpenMap<String, MappingMetadata> mappings = metadata.findMappings(
new String[]{"index1", "index2"},
MapperPlugin.NOOP_FIELD_FILTER);
assertEquals(2, mappings.size());
assertIndexMappingsNotFiltered(mappings, "index1");
assertIndexMappingsNotFiltered(mappings, "index2");
}
}
public void testFindMappingsNoOpFilters() throws IOException {
MappingMetadata originalMappingMetadata = new MappingMetadata("_doc",
XContentHelper.convertToMap(JsonXContent.jsonXContent, FIND_MAPPINGS_TEST_ITEM, true));
Metadata metadata = Metadata.builder()
.put(IndexMetadata.builder("index1")
.settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0))
.putMapping(originalMappingMetadata)).build();
{
ImmutableOpenMap<String, MappingMetadata> mappings = metadata.findMappings(new String[]{"index1"},
MapperPlugin.NOOP_FIELD_FILTER);
MappingMetadata mappingMetadata = mappings.get("index1");
assertSame(originalMappingMetadata, mappingMetadata);
}
{
ImmutableOpenMap<String, MappingMetadata> mappings = metadata.findMappings(new String[]{"index1"},
index -> field -> randomBoolean());
MappingMetadata mappingMetadata = mappings.get("index1");
assertNotSame(originalMappingMetadata, mappingMetadata);
}
}
    /**
     * Verifies per-index field filtering in findMappings: removed fields disappear from the
     * returned mapping source while the _routing/_source metadata and the surrounding
     * object/nested structure (minus the filtered leaves) are preserved.
     */
    @SuppressWarnings("unchecked")
    public void testFindMappingsWithFilters() throws IOException {
        String mapping = FIND_MAPPINGS_TEST_ITEM;
        if (randomBoolean()) {
            // randomly strip the "_doc" wrapper; findMappings must accept both layouts
            Map<String, Object> stringObjectMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, FIND_MAPPINGS_TEST_ITEM, false);
            Map<String, Object> doc = (Map<String, Object>)stringObjectMap.get("_doc");
            try (XContentBuilder builder = JsonXContent.contentBuilder()) {
                builder.map(doc);
                mapping = Strings.toString(builder);
            }
        }
        // three identical single-shard indices sharing the test mapping
        Metadata metadata = Metadata.builder()
            .put(IndexMetadata.builder("index1")
                .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
                    .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0))
                .putMapping(mapping))
            .put(IndexMetadata.builder("index2")
                .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
                    .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0))
                .putMapping(mapping))
            .put(IndexMetadata.builder("index3")
                .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)
                    .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0))
                .putMapping(mapping)).build();
        {
            // index1: drop name.* leaves, properties.key.*, "age" and "address.location";
            // index2: drop everything; index3: keep everything
            ImmutableOpenMap<String, MappingMetadata> mappings = metadata.findMappings(
                new String[]{"index1", "index2", "index3"},
                index -> {
                    if (index.equals("index1")) {
                        return field -> field.startsWith("name.") == false && field.startsWith("properties.key.") == false
                            && field.equals("age") == false && field.equals("address.location") == false;
                    }
                    if (index.equals("index2")) {
                        return field -> false;
                    }
                    return MapperPlugin.NOOP_FIELD_PREDICATE;
                });
            assertIndexMappingsNoFields(mappings, "index2");
            assertIndexMappingsNotFiltered(mappings, "index3");
            MappingMetadata docMapping = mappings.get("index1");
            assertNotNull(docMapping);
            Map<String, Object> sourceAsMap = docMapping.getSourceAsMap();
            assertEquals(3, sourceAsMap.size());
            assertTrue(sourceAsMap.containsKey("_routing"));
            assertTrue(sourceAsMap.containsKey("_source"));
            // "age" was filtered out, the other six top-level fields survive
            Map<String, Object> typeProperties = (Map<String, Object>) sourceAsMap.get("properties");
            assertEquals(6, typeProperties.size());
            assertTrue(typeProperties.containsKey("birth"));
            assertTrue(typeProperties.containsKey("ip"));
            assertTrue(typeProperties.containsKey("suggest"));
            // "name" object remains but all its sub-fields were removed
            Map<String, Object> name = (Map<String, Object>) typeProperties.get("name");
            assertNotNull(name);
            assertEquals(1, name.size());
            Map<String, Object> nameProperties = (Map<String, Object>) name.get("properties");
            assertNotNull(nameProperties);
            assertEquals(0, nameProperties.size());
            // "address" keeps street/area; "location" was filtered
            Map<String, Object> address = (Map<String, Object>) typeProperties.get("address");
            assertNotNull(address);
            assertEquals(2, address.size());
            assertTrue(address.containsKey("type"));
            Map<String, Object> addressProperties = (Map<String, Object>) address.get("properties");
            assertNotNull(addressProperties);
            assertEquals(2, addressProperties.size());
            assertLeafs(addressProperties, "street", "area");
            // nested "properties": "key" lost its multi-field, "value" kept it
            Map<String, Object> properties = (Map<String, Object>) typeProperties.get("properties");
            assertNotNull(properties);
            assertEquals(2, properties.size());
            assertTrue(properties.containsKey("type"));
            Map<String, Object> propertiesProperties = (Map<String, Object>) properties.get("properties");
            assertNotNull(propertiesProperties);
            assertEquals(2, propertiesProperties.size());
            assertLeafs(propertiesProperties, "key");
            assertMultiField(propertiesProperties, "value", "keyword");
        }
        {
            // only index3 keeps fields, and of those only the ones ending in "keyword"
            ImmutableOpenMap<String, MappingMetadata> mappings = metadata.findMappings(
                new String[]{"index1", "index2" , "index3"},
                index -> field -> (index.equals("index3") && field.endsWith("keyword")));
            assertIndexMappingsNoFields(mappings, "index1");
            assertIndexMappingsNoFields(mappings, "index2");
            MappingMetadata mappingMetadata = mappings.get("index3");
            Map<String, Object> sourceAsMap = mappingMetadata.getSourceAsMap();
            assertEquals(3, sourceAsMap.size());
            assertTrue(sourceAsMap.containsKey("_routing"));
            assertTrue(sourceAsMap.containsKey("_source"));
            // only the nested "properties" field contains "...keyword" leaves
            Map<String, Object> typeProperties = (Map<String, Object>) sourceAsMap.get("properties");
            assertNotNull(typeProperties);
            assertEquals(1, typeProperties.size());
            Map<String, Object> properties = (Map<String, Object>) typeProperties.get("properties");
            assertNotNull(properties);
            assertEquals(2, properties.size());
            assertTrue(properties.containsKey("type"));
            Map<String, Object> propertiesProperties = (Map<String, Object>) properties.get("properties");
            assertNotNull(propertiesProperties);
            assertEquals(2, propertiesProperties.size());
            // note: the surviving multi-fields are re-rooted under "properties"
            Map<String, Object> key = (Map<String, Object>) propertiesProperties.get("key");
            assertEquals(1, key.size());
            Map<String, Object> keyProperties = (Map<String, Object>) key.get("properties");
            assertEquals(1, keyProperties.size());
            assertLeafs(keyProperties, "keyword");
            Map<String, Object> value = (Map<String, Object>) propertiesProperties.get("value");
            assertEquals(1, value.size());
            Map<String, Object> valueProperties = (Map<String, Object>) value.get("properties");
            assertEquals(1, valueProperties.size());
            assertLeafs(valueProperties, "keyword");
        }
        {
            // filter that keeps everything for index2 only
            ImmutableOpenMap<String, MappingMetadata> mappings = metadata.findMappings(
                new String[]{"index1", "index2" , "index3"},
                index -> field -> (index.equals("index2")));
            assertIndexMappingsNoFields(mappings, "index1");
            assertIndexMappingsNoFields(mappings, "index3");
            assertIndexMappingsNotFiltered(mappings, "index2");
        }
    }
private static IndexMetadata.Builder buildIndexMetadata(String name, String alias, Boolean writeIndex) {
return IndexMetadata.builder(name)
.settings(settings(Version.CURRENT)).creationDate(randomNonNegativeLong())
.putAlias(AliasMetadata.builder(alias).writeIndex(writeIndex))
.numberOfShards(1).numberOfReplicas(0);
}
@SuppressWarnings("unchecked")
private static void assertIndexMappingsNoFields(ImmutableOpenMap<String, MappingMetadata> mappings,
String index) {
MappingMetadata docMapping = mappings.get(index);
assertNotNull(docMapping);
Map<String, Object> sourceAsMap = docMapping.getSourceAsMap();
assertEquals(3, sourceAsMap.size());
assertTrue(sourceAsMap.containsKey("_routing"));
assertTrue(sourceAsMap.containsKey("_source"));
Map<String, Object> typeProperties = (Map<String, Object>) sourceAsMap.get("properties");
assertEquals(0, typeProperties.size());
}
@SuppressWarnings("unchecked")
private static void assertIndexMappingsNotFiltered(ImmutableOpenMap<String, MappingMetadata> mappings,
String index) {
MappingMetadata docMapping = mappings.get(index);
assertNotNull(docMapping);
Map<String, Object> sourceAsMap = docMapping.getSourceAsMap();
assertEquals(3, sourceAsMap.size());
assertTrue(sourceAsMap.containsKey("_routing"));
assertTrue(sourceAsMap.containsKey("_source"));
Map<String, Object> typeProperties = (Map<String, Object>) sourceAsMap.get("properties");
assertEquals(7, typeProperties.size());
assertTrue(typeProperties.containsKey("birth"));
assertTrue(typeProperties.containsKey("age"));
assertTrue(typeProperties.containsKey("ip"));
assertTrue(typeProperties.containsKey("suggest"));
Map<String, Object> name = (Map<String, Object>) typeProperties.get("name");
assertNotNull(name);
assertEquals(1, name.size());
Map<String, Object> nameProperties = (Map<String, Object>) name.get("properties");
assertNotNull(nameProperties);
assertEquals(2, nameProperties.size());
assertLeafs(nameProperties, "first", "last");
Map<String, Object> address = (Map<String, Object>) typeProperties.get("address");
assertNotNull(address);
assertEquals(2, address.size());
assertTrue(address.containsKey("type"));
Map<String, Object> addressProperties = (Map<String, Object>) address.get("properties");
assertNotNull(addressProperties);
assertEquals(3, addressProperties.size());
assertLeafs(addressProperties, "street", "location", "area");
Map<String, Object> properties = (Map<String, Object>) typeProperties.get("properties");
assertNotNull(properties);
assertEquals(2, properties.size());
assertTrue(properties.containsKey("type"));
Map<String, Object> propertiesProperties = (Map<String, Object>) properties.get("properties");
assertNotNull(propertiesProperties);
assertEquals(2, propertiesProperties.size());
assertMultiField(propertiesProperties, "key", "keyword");
assertMultiField(propertiesProperties, "value", "keyword");
}
@SuppressWarnings("unchecked")
public static void assertLeafs(Map<String, Object> properties, String... fields) {
for (String field : fields) {
assertTrue(properties.containsKey(field));
Map<String, Object> fieldProp = (Map<String, Object>)properties.get(field);
assertNotNull(fieldProp);
assertFalse(fieldProp.containsKey("properties"));
assertFalse(fieldProp.containsKey("fields"));
}
}
public static void assertMultiField(Map<String, Object> properties, String field, String... subFields) {
assertTrue(properties.containsKey(field));
@SuppressWarnings("unchecked")
Map<String, Object> fieldProp = (Map<String, Object>)properties.get(field);
assertNotNull(fieldProp);
assertTrue(fieldProp.containsKey("fields"));
@SuppressWarnings("unchecked")
Map<String, Object> subFieldsDef = (Map<String, Object>) fieldProp.get("fields");
assertLeafs(subFieldsDef, subFields);
}
    /**
     * Mapping fixture for the findMappings tests: a "_doc" mapping with required _routing,
     * disabled _source, object fields ("name", "address"), simple leaves ("birth", "age",
     * "ip", "suggest") and a nested field ("properties") whose "key"/"value" sub-fields
     * each carry a "keyword" multi-field.
     */
    private static final String FIND_MAPPINGS_TEST_ITEM = "{\n" +
        "  \"_doc\": {\n" +
        "      \"_routing\": {\n" +
        "          \"required\":true\n" +
        "      }," +
        "      \"_source\": {\n" +
        "          \"enabled\":false\n" +
        "      }," +
        "      \"properties\": {\n" +
        "        \"name\": {\n" +
        "          \"properties\": {\n" +
        "            \"first\": {\n" +
        "              \"type\": \"keyword\"\n" +
        "            },\n" +
        "            \"last\": {\n" +
        "              \"type\": \"keyword\"\n" +
        "            }\n" +
        "          }\n" +
        "        },\n" +
        "        \"birth\": {\n" +
        "          \"type\": \"date\"\n" +
        "        },\n" +
        "        \"age\": {\n" +
        "          \"type\": \"integer\"\n" +
        "        },\n" +
        "        \"ip\": {\n" +
        "          \"type\": \"ip\"\n" +
        "        },\n" +
        "        \"suggest\" : {\n" +
        "          \"type\": \"completion\"\n" +
        "        },\n" +
        "        \"address\": {\n" +
        "          \"type\": \"object\",\n" +
        "          \"properties\": {\n" +
        "            \"street\": {\n" +
        "              \"type\": \"keyword\"\n" +
        "            },\n" +
        "            \"location\": {\n" +
        "              \"type\": \"geo_point\"\n" +
        "            },\n" +
        "            \"area\": {\n" +
        "              \"type\": \"geo_shape\",  \n" +
        "              \"tree\": \"quadtree\",\n" +
        "              \"precision\": \"1m\"\n" +
        "            }\n" +
        "          }\n" +
        "        },\n" +
        "        \"properties\": {\n" +
        "          \"type\": \"nested\",\n" +
        "          \"properties\": {\n" +
        "            \"key\" : {\n" +
        "              \"type\": \"text\",\n" +
        "              \"fields\": {\n" +
        "                \"keyword\" : {\n" +
        "                  \"type\" : \"keyword\"\n" +
        "                }\n" +
        "              }\n" +
        "            },\n" +
        "            \"value\" : {\n" +
        "              \"type\": \"text\",\n" +
        "              \"fields\": {\n" +
        "                \"keyword\" : {\n" +
        "                  \"type\" : \"keyword\"\n" +
        "                }\n" +
        "              }\n" +
        "            }\n" +
        "          }\n" +
        "        }\n" +
        "      }\n" +
        "  }\n" +
        "}";
public void testTransientSettingsOverridePersistentSettings() {
final Setting setting = Setting.simpleString("key");
final Metadata metadata = Metadata.builder()
.persistentSettings(Settings.builder().put(setting.getKey(), "persistent-value").build())
.transientSettings(Settings.builder().put(setting.getKey(), "transient-value").build()).build();
assertThat(setting.get(metadata.settings()), equalTo("transient-value"));
}
public void testBuilderRejectsNullCustom() {
final Metadata.Builder builder = Metadata.builder();
final String key = randomAlphaOfLength(10);
assertThat(expectThrows(NullPointerException.class, () -> builder.putCustom(key, null)).getMessage(), containsString(key));
}
public void testBuilderRejectsNullInCustoms() {
final Metadata.Builder builder = Metadata.builder();
final String key = randomAlphaOfLength(10);
final ImmutableOpenMap.Builder<String, Metadata.Custom> mapBuilder = ImmutableOpenMap.builder();
mapBuilder.put(key, null);
final ImmutableOpenMap<String, Metadata.Custom> map = mapBuilder.build();
assertThat(expectThrows(NullPointerException.class, () -> builder.customs(map)).getMessage(), containsString(key));
}
public void testBuilderRejectsDataStreamThatConflictsWithIndex() {
final String dataStreamName = "my-data-stream";
Metadata.Builder b = Metadata.builder()
.put(IndexMetadata.builder(dataStreamName)
.settings(settings(Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(1)
.build(), false)
.put(new DataStream(dataStreamName, "ts", Collections.emptyList()));
IllegalStateException e = expectThrows(IllegalStateException.class, b::build);
assertThat(e.getMessage(), containsString("data stream [" + dataStreamName + "] conflicts with existing index or alias"));
}
public void testBuilderRejectsDataStreamThatConflictsWithAlias() {
final String dataStreamName = "my-data-stream";
Metadata.Builder b = Metadata.builder()
.put(IndexMetadata.builder(dataStreamName + "z")
.settings(settings(Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(1)
.putAlias(AliasMetadata.builder(dataStreamName).build())
.build(), false)
.put(new DataStream(dataStreamName, "ts", Collections.emptyList()));
IllegalStateException e = expectThrows(IllegalStateException.class, b::build);
assertThat(e.getMessage(), containsString("data stream [" + dataStreamName + "] conflicts with existing index or alias"));
}
public void testBuilderRejectsDataStreamWithConflictingBackingIndices() {
final String dataStreamName = "my-data-stream";
final String conflictingIndex = dataStreamName + "-000001";
Metadata.Builder b = Metadata.builder()
.put(IndexMetadata.builder(conflictingIndex)
.settings(settings(Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(1)
.build(), false)
.put(new DataStream(dataStreamName, "ts", Collections.emptyList()));
IllegalStateException e = expectThrows(IllegalStateException.class, b::build);
assertThat(e.getMessage(), containsString("data stream [" + dataStreamName +
"] could create backing indices that conflict with 1 existing index(s) or alias(s) including '" + conflictingIndex + "'"));
}
public void testBuilderForDataStreamWithRandomlyNumberedBackingIndices() {
final String dataStreamName = "my-data-stream";
final List<Index> backingIndices = new ArrayList<>();
final int numBackingIndices = randomIntBetween(2, 5);
int lastBackingIndexNum = randomIntBetween(9, 50);
Metadata.Builder b = Metadata.builder();
for (int k = 1; k <= numBackingIndices; k++) {
IndexMetadata im = IndexMetadata.builder(String.format(Locale.ROOT, "%s-%06d", dataStreamName, lastBackingIndexNum))
.settings(settings(Version.CURRENT))
.numberOfShards(1)
.numberOfReplicas(1)
.build();
b.put(im, false);
backingIndices.add(im.getIndex());
lastBackingIndexNum = randomIntBetween(lastBackingIndexNum + 1, lastBackingIndexNum + 50);
}
b.put(new DataStream(dataStreamName, "ts", backingIndices));
Metadata metadata = b.build();
assertThat(metadata.dataStreams().size(), equalTo(1));
assertThat(metadata.dataStreams().get(dataStreamName).getName(), equalTo(dataStreamName));
}
public void testSerialization() throws IOException {
final Metadata orig = randomMetadata();
final BytesStreamOutput out = new BytesStreamOutput();
orig.writeTo(out);
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(ClusterModule.getNamedWriteables());
final Metadata fromStreamMeta = Metadata.readFrom(new NamedWriteableAwareStreamInput(out.bytes().streamInput(),
namedWriteableRegistry));
assertTrue(Metadata.isGlobalStateEquals(orig, fromStreamMeta));
}
public static Metadata randomMetadata() {
return Metadata.builder()
.put(buildIndexMetadata("index", "alias", randomBoolean() ? null : randomBoolean()).build(), randomBoolean())
.put(IndexTemplateMetadata.builder("template" + randomAlphaOfLength(3))
.patterns(Arrays.asList("bar-*", "foo-*"))
.settings(Settings.builder()
.put("random_index_setting_" + randomAlphaOfLength(3), randomAlphaOfLength(5))
.build())
.build())
.persistentSettings(Settings.builder()
.put("setting" + randomAlphaOfLength(3), randomAlphaOfLength(4))
.build())
.transientSettings(Settings.builder()
.put("other_setting" + randomAlphaOfLength(3), randomAlphaOfLength(4))
.build())
.clusterUUID("uuid" + randomAlphaOfLength(3))
.clusterUUIDCommitted(randomBoolean())
.indexGraveyard(IndexGraveyardTests.createRandom())
.version(randomNonNegativeLong())
.put("component_template_" + randomAlphaOfLength(3), ComponentTemplateTests.randomInstance())
.put("index_template_v2_" + randomAlphaOfLength(3), IndexTemplateV2Tests.randomInstance())
.put(DataStreamTests.randomInstance())
.build();
}
}
| |
/*******************************************************************************
* Copyright (c) 2012 Hidehiko Masuhara.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Public License v3.0
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/gpl.html
*
* Contributors:
* Hidehiko Masuhara - initial API and implementation
*
* $Id: ImagePanel.java,v 1.7 2012/09/14 05:06:45 masuhara Exp $
******************************************************************************/
package jp.ac.utokyo.c.graco.isrb;
import java.awt.Color;
import java.awt.Component;
import java.awt.Frame;
import java.awt.Graphics;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;
import java.io.File;
import java.io.IOException;
import java.util.Vector;
import javax.imageio.ImageIO;
import javax.swing.AbstractAction;
import javax.swing.JFileChooser;
import javax.swing.JFrame;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.KeyStroke;
import javax.swing.filechooser.FileNameExtensionFilter;
@SuppressWarnings("serial")
public class ImagePanel extends JPanel {
    // Default size of the frame hosting this panel.
    private static final int FRAME_WIDTH = 500;
    private static final int FRAME_HEIGHT = 500;
    // Dimensions of the current image; 0 until an image is set (done elsewhere in the class).
    private int imageWidth = 0;
    private int imageHeight = 0;
    // The image currently displayed; also the image written out by saveImageAs().
    private BufferedImage img;
    // Frame buffer for animation playback — presumably consumed by playOrStop(); managed
    // outside this chunk (TODO confirm).
    private Vector<BufferedImage> frames = new Vector<BufferedImage>();
    // True while an animation is running (maintained elsewhere in the class).
    private boolean playing;
    // Index of the frame currently shown.
    protected int currentFrame;
    // Pop-up/keyboard action toggling animation; created disabled in the constructor.
    private ImageFrameAction playOrStopAction;
    // Raw pixel storage for the current image — usage not visible in this chunk.
    private int[] pixelData;
abstract class ImageFrameAction extends AbstractAction {
ImageFrameAction(int mnemonicKey, KeyStroke acceleratorKey,
String actionName, boolean enabled) {
super(actionName);
this.putValue(AbstractAction.MNEMONIC_KEY, mnemonicKey);
this.putValue(AbstractAction.ACCELERATOR_KEY, acceleratorKey);
this.setEnabled(enabled);
ImagePanel.this.getActionMap().put(actionName, this);
ImagePanel.this.getInputMap().put(acceleratorKey, actionName);
}
}
public Window getEnclosingFrame() {
Component f = ImagePanel.this;
while (!(f instanceof Window))
f = f.getParent();
return (Window) f;
}
    /**
     * Builds the panel's pop-up menu (play/stop, save-as, close) and wires it so the
     * menu appears on both left and right mouse clicks.
     */
    private ImagePanel() {
        // Ctrl-A: toggle animation; created disabled until animation becomes possible.
        playOrStopAction = new ImageFrameAction(KeyEvent.VK_A,
                KeyStroke.getKeyStroke(KeyEvent.VK_A, KeyEvent.CTRL_MASK),
                "Start/Stop animation", false) {//disabled at first
            @Override
            public void actionPerformed(ActionEvent e) {
                playOrStop();
            }
        };
        // Ctrl-S: prompt for a file name and save the current image.
        AbstractAction saveAsAction = new ImageFrameAction(KeyEvent.VK_S,
                KeyStroke.getKeyStroke(KeyEvent.VK_S, KeyEvent.CTRL_MASK),
                "Save Image as...", true) {
            @Override
            public void actionPerformed(ActionEvent e) {
                saveImageAs();
            }
        };
        // Ctrl-W: dispose the window containing this panel.
        AbstractAction closeAction = new ImageFrameAction(KeyEvent.VK_C,
                KeyStroke.getKeyStroke(KeyEvent.VK_W, KeyEvent.CTRL_MASK),
                "Close", true) {
            @Override
            public void actionPerformed(ActionEvent e) {
                getEnclosingFrame().dispose();
            }
        };
        // Assemble the pop-up menu; item order here defines the on-screen order.
        final JPopupMenu popup = new JPopupMenu();
        JMenuItem playOrStopItem = new JMenuItem(playOrStopAction);
        JMenuItem closeWindowItem = new JMenuItem(closeAction);
        JMenuItem saveImageAsItem = new JMenuItem(saveAsAction);
        popup.add(playOrStopItem);
        popup.add(saveImageAsItem);
        popup.add(closeWindowItem);
        setComponentPopupMenu(popup);
        // to show the pop-up menu even with a click of the left-button
        // (setComponentPopupMenu will make the menu appearing with
        // right-clicking, but I'd like to show it with a left-click as well.)
        addMouseListener(new MouseAdapter() {
            @Override
            public void mouseReleased(MouseEvent e) {show(e);}
            @Override
            public void mousePressed(MouseEvent e) {show(e);}
            // show the menu at the click position on both press and release
            private void show(MouseEvent e) {
                popup.show(e.getComponent(), e.getX(), e.getY());
            }
        });
    }
// TODO: show play button when animation is possible
// TODO: spreadsheet view
public void saveImageAs() {
try {
File f = askFileName();
if (f != null) {
String name = f.toString();
String ext = name.substring(name.indexOf('.', 0) + 1);
ImageIO.write(img, ext, f);
}
} catch (IOException e) {
System.err.println(e);
e.printStackTrace();
}
}
private File askFileName() {
JFileChooser chooser = new JFileChooser();
// TODO remember the directory lastly chosen, and set that as its
// default
chooser.setAcceptAllFileFilterUsed(false);
chooser.addChoosableFileFilter(new FileNameExtensionFilter(
"PNG format", "png", "PNG"));
chooser.addChoosableFileFilter(new FileNameExtensionFilter(
"JPEG format", "jpg", "JPG", "jpeg", "JPEG"));
chooser.setSelectedFile(defaultFileNameToSave(chooser
.getCurrentDirectory()));
int returnVal = chooser.showSaveDialog(getParent());
File f = returnVal == JFileChooser.APPROVE_OPTION ? chooser
.getSelectedFile() : null;
return f;
}
private File defaultFileNameToSave(File directory) {
String base = "image";
String ext = ".png";
int counter = 0;
File f = new File(directory, base + ext);
while (f.exists())
f = new File(directory, base + (counter++) + ext);
return f;
}
@Override
public void paint(Graphics g) {
g.setColor(Color.blue);
g.fillRect(0, 0, getWidth(), getHeight());// TODO do only when the frame
// is resized
BufferedImage imageToShow = getImage();
if (imageToShow == null)
return;
int frameWidth = getWidth(); // Since the panel might be resized after
int frameHeight = getHeight();// creation, we obtain actual size
int scale = Math.max(
1,
Math.min((frameWidth - 50) / imageWidth, (frameHeight - 50)
/ imageHeight));
int dx1 = (frameWidth - (imageWidth * scale)) / 2;
int dy1 = (frameHeight - (imageHeight * scale)) / 2;
int dx2 = dx1 + imageWidth * scale;
int dy2 = dy1 + imageHeight * scale;
g.drawImage(imageToShow, dx1, dy1, dx2, dy2, 0, 0, imageWidth,
imageHeight, null);
}
private BufferedImage getImage() {
return playing ? frames.get(currentFrame) : img;
}
private void prepareImage(int w, int h) {
if (img == null) {
imageWidth = w;
imageHeight = h;
img = new BufferedImage(imageWidth, imageHeight,
BufferedImage.TYPE_INT_RGB);
// frames.add(img);
pixelData = ((DataBufferInt)img.getRaster().getDataBuffer()).getData();
}
}
private void recordImage() {
BufferedImage savedImage = new BufferedImage(imageWidth,
imageHeight, BufferedImage.TYPE_INT_RGB);
savedImage.setData(img.copyData(null));
frames.add(savedImage);
if (frames.size()>=2)
playOrStopAction.setEnabled(true);
}
public void showGray(double[][] a) {
showGrayWithDataBuffer(a);
}
// private void showGrayWithGraphics(double[][] a) {
// Graphics g = prepareImage(a[0].length, a.length);
// for (int y = 0; y < a.length; y++)
// for (int x = 0; x < a[y].length; x++) {
// double intensity = a[y][x];
// g.setColor(new Color(normalize8bit(intensity),normalize8bit(intensity),normalize8bit(intensity)));
// g.fillRect(x, y, 1, 1);
// }
// repaint();
// }
private void showGrayWithDataBuffer(double[][] a) {
prepareImage(a[0].length, a.length);
for (int y = 0; y < Math.min(imageHeight, a.length); y++)
for (int x = 0; x < Math.min(imageWidth, a[y].length); x++) {
int intensity = normalize8bit(a[y][x]);
pixelData[y*imageWidth + x] = (intensity<<16) | (intensity<<8) | intensity;
}
repaint();
recordImage();
}
public void showColor(double[][][] a) {
// showColorWithGraphics(a);//13.7 secs
showColorWithDataBuffer(a);//10.0 secs
}
// private void showColorWithGraphics(double[][][] a) {
// Graphics graphics = prepareImage(a[0].length, a.length);
// for (int y = 0; y < a.length; y++)
// for (int x = 0; x < a[y].length; x++) {
// int r = normalize8bit(a[y][x][0]);
// int g = normalize8bit(a[y][x][1]);
// int b = normalize8bit(a[y][x][2]);
// graphics.setColor(new Color(r, g, b));
// graphics.fillRect(x, y, 1, 1);
// }
// repaint();
//
// }
private void showColorWithDataBuffer(double[][][] a) {
prepareImage(a[0].length, a.length);
pixelData = ((DataBufferInt)img.getRaster().getDataBuffer()).getData();
try {
storeColorImageFast(a, pixelData);
} catch (ArrayIndexOutOfBoundsException e)
{ storeColorImageSafe(a, pixelData); }
repaint();
recordImage();
}
private void storeColorImageFast(double[][][] a, int[] pixelData) {
for (int y = 0; y < Math.min(imageHeight, a.length); y++)
for (int x = 0; x < Math.min(imageWidth, a[y].length); x++) {
int r=0,g=0,b=0;
r = normalize8bit(a[y][x][0]);
g = normalize8bit(a[y][x][1]);
b = normalize8bit(a[y][x][2]);
pixelData[y*imageWidth + x] = (r<<16) | (g<<8) | b;
}
}
private void storeColorImageSafe(double[][][] a, int[] pixelData) {
for (int y = 0; y < Math.min(imageHeight, a.length); y++)
for (int x = 0; x < Math.min(imageWidth, a[y].length); x++) {
int r=0,g=0,b=0;
try {
r = normalize8bit(a[y][x][0]);
g = normalize8bit(a[y][x][1]);
b = normalize8bit(a[y][x][2]);
} catch (ArrayIndexOutOfBoundsException e) { ; }
pixelData[y*imageWidth + x] = (r<<16) | (g<<8) | b;
}
}
private final int normalize8bit(final double intensity) {
//1: without normalization
// return (int) (intensity*255);
//2: with normalization
return Math.max(0, Math.min(255, (int) (intensity * 255)));
//Benchmark tests of drawing ten 400x400 images:
//1: 5.8 secs
//2: 5.6 secs
}
public void updatePixel(int x, int y, double r, double g, double b) {
int rint = normalize8bit(r);
int gint = normalize8bit(g);
int bint = normalize8bit(b);
pixelData[y*imageWidth+x] = (rint<<16) | (gint<<8) | bint;
repaint();
}
public synchronized void playOrStop() {
if (playing)
playing = false;
else
play();
}
// to start animation
private void play() {
playing = true;// TODO thread safety
new Thread() {
@Override
public void run() {
currentFrame = 0; // Since the first element in the frames
// contains the latest image, we start
// animation from index 1, which should
// contain the oldest shown image.
Window w = getEnclosingFrame();
// stop animation when w is disposed
int interval = Math.max(1000/30, 1000/frames.size());
while (playing && w.isDisplayable()) {
try {
Thread.sleep(interval);
currentFrame = (currentFrame + 1) % frames.size();
repaint();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
}.start();
}
public static ImagePanel createFrame() throws InterruptedException {
final JFrame f = new JFrame("isrb");
f.setSize(FRAME_WIDTH, FRAME_HEIGHT);
// to allow the VM terminate after the last frame is closed. (Otherwise,
// the VM may not terminate even if all the frames are closed and the
// main thread is stopped.)
f.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
ImagePanel imagePanel = new ImagePanel();
f.getContentPane().add(imagePanel);
f.validate();
f.setVisible(true);
return imagePanel;
}
public static void atExit() {
Frame[] fs = JFrame.getFrames();
boolean existsVisibleFrame = false;
for (Frame f : fs)
existsVisibleFrame = existsVisibleFrame || f.isVisible();
if (existsVisibleFrame) {
JOptionPane.showMessageDialog(null, "isrb: all windows will be closed.");
closeAll();
}
}
public static void closeAll() {
for (Frame f : JFrame.getFrames())
f.dispose();
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto
package io.grafeas.v1beta1;
/**
*
*
* <pre>
* Request to list occurrences for a note.
* </pre>
*
* Protobuf type {@code grafeas.v1beta1.ListNoteOccurrencesRequest}
*/
public final class ListNoteOccurrencesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:grafeas.v1beta1.ListNoteOccurrencesRequest)
ListNoteOccurrencesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListNoteOccurrencesRequest.newBuilder() to construct.
private ListNoteOccurrencesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListNoteOccurrencesRequest() {
name_ = "";
filter_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListNoteOccurrencesRequest();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
  /**
   * Parses a serialized {@code ListNoteOccurrencesRequest} from {@code input}.
   *
   * <p>Reads tag/value pairs until end of stream (tag 0). Recognized fields
   * are stored directly; anything else is preserved in {@code unknownFields}
   * so that re-serializing does not lose data.
   *
   * @throws com.google.protobuf.InvalidProtocolBufferException if the input is
   *     malformed; the partially parsed message is attached to the exception
   */
  private ListNoteOccurrencesRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        // Tag = (field_number << 3) | wire_type.
        switch (tag) {
          case 0: // end of stream
            done = true;
            break;
          case 10: // field 1 (name), wire type 2 (length-delimited)
            {
              java.lang.String s = input.readStringRequireUtf8();
              name_ = s;
              break;
            }
          case 18: // field 2 (filter), wire type 2 (length-delimited)
            {
              java.lang.String s = input.readStringRequireUtf8();
              filter_ = s;
              break;
            }
          case 24: // field 3 (page_size), wire type 0 (varint)
            {
              pageSize_ = input.readInt32();
              break;
            }
          case 34: // field 4 (page_token), wire type 2 (length-delimited)
            {
              java.lang.String s = input.readStringRequireUtf8();
              pageToken_ = s;
              break;
            }
          default:
            {
              // Unrecognized field: keep its raw bytes; stop on end-group.
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      // Always finalize unknown fields, even when parsing failed part-way.
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
io.grafeas.v1beta1.ListNoteOccurrencesRequest.class,
io.grafeas.v1beta1.ListNoteOccurrencesRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 1;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* The name of the note to list occurrences for in the form of
* `projects/[PROVIDER_ID]/notes/[NOTE_ID]`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
  @java.lang.Override
  public java.lang.String getName() {
    // The field may hold either a String or the wire-format ByteString.
    // Decode lazily and cache the String back into name_ so later calls are
    // free. Same idiom as getFilter()/getPageToken().
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s; // safe benign race: field is volatile, value is immutable
      return s;
    }
  }
/**
*
*
* <pre>
* The name of the note to list occurrences for in the form of
* `projects/[PROVIDER_ID]/notes/[NOTE_ID]`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 2;
private volatile java.lang.Object filter_;
/**
*
*
* <pre>
* The filter expression.
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* The filter expression.
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_;
/**
*
*
* <pre>
* Number of occurrences to return in the list.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
private volatile java.lang.Object pageToken_;
/**
*
*
* <pre>
* Token to provide to skip to a particular spot in the list.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to provide to skip to a particular spot in the list.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized tri-state: -1 = not yet computed, 0 = false, 1 = true.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output}. Proto3 semantics: fields at
   * their default value (empty string / 0) are not written to the wire.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!getNameBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!getFilterBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!getPageTokenBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    // Round-trip any fields we did not recognize when parsing.
    unknownFields.writeTo(output);
  }
  /**
   * Computes (and memoizes) the serialized size in bytes. Mirrors the
   * default-value skipping in {@code writeTo} so size and output agree.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // cached from a previous call (-1 = unset)
    size = 0;
    if (!getNameBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!getFilterBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!getPageTokenBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size; // message is immutable, so caching is safe
    return size;
  }
  /**
   * Value equality: two requests are equal iff all four fields and the
   * unknown-field sets match. Consistent with {@link #hashCode()}.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof io.grafeas.v1beta1.ListNoteOccurrencesRequest)) {
      return super.equals(obj); // identity comparison for foreign types
    }
    io.grafeas.v1beta1.ListNoteOccurrencesRequest other =
        (io.grafeas.v1beta1.ListNoteOccurrencesRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  /**
   * Hash over the descriptor and every field, memoized because the message is
   * immutable. Mixing each field number (37/53 multiplier pattern) keeps equal
   * values in different fields from colliding; consistent with equals().
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode; // 0 doubles as the "not computed" sentinel
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(io.grafeas.v1beta1.ListNoteOccurrencesRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request to list occurrences for a note.
* </pre>
*
* Protobuf type {@code grafeas.v1beta1.ListNoteOccurrencesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:grafeas.v1beta1.ListNoteOccurrencesRequest)
io.grafeas.v1beta1.ListNoteOccurrencesRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
io.grafeas.v1beta1.ListNoteOccurrencesRequest.class,
io.grafeas.v1beta1.ListNoteOccurrencesRequest.Builder.class);
}
// Construct using io.grafeas.v1beta1.ListNoteOccurrencesRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
filter_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return io.grafeas.v1beta1.Grafeas
.internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_descriptor;
}
@java.lang.Override
public io.grafeas.v1beta1.ListNoteOccurrencesRequest getDefaultInstanceForType() {
return io.grafeas.v1beta1.ListNoteOccurrencesRequest.getDefaultInstance();
}
@java.lang.Override
public io.grafeas.v1beta1.ListNoteOccurrencesRequest build() {
io.grafeas.v1beta1.ListNoteOccurrencesRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public io.grafeas.v1beta1.ListNoteOccurrencesRequest buildPartial() {
io.grafeas.v1beta1.ListNoteOccurrencesRequest result =
new io.grafeas.v1beta1.ListNoteOccurrencesRequest(this);
result.name_ = name_;
result.filter_ = filter_;
result.pageSize_ = pageSize_;
result.pageToken_ = pageToken_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof io.grafeas.v1beta1.ListNoteOccurrencesRequest) {
return mergeFrom((io.grafeas.v1beta1.ListNoteOccurrencesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(io.grafeas.v1beta1.ListNoteOccurrencesRequest other) {
if (other == io.grafeas.v1beta1.ListNoteOccurrencesRequest.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
io.grafeas.v1beta1.ListNoteOccurrencesRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (io.grafeas.v1beta1.ListNoteOccurrencesRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* The name of the note to list occurrences for in the form of
* `projects/[PROVIDER_ID]/notes/[NOTE_ID]`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of the note to list occurrences for in the form of
* `projects/[PROVIDER_ID]/notes/[NOTE_ID]`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the note to list occurrences for in the form of
* `projects/[PROVIDER_ID]/notes/[NOTE_ID]`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the note to list occurrences for in the form of
* `projects/[PROVIDER_ID]/notes/[NOTE_ID]`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the note to list occurrences for in the form of
* `projects/[PROVIDER_ID]/notes/[NOTE_ID]`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* The filter expression.
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The filter expression.
* </pre>
*
* <code>string filter = 2;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The filter expression.
* </pre>
*
* <code>string filter = 2;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The filter expression.
* </pre>
*
* <code>string filter = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
onChanged();
return this;
}
/**
*
*
* <pre>
* The filter expression.
* </pre>
*
* <code>string filter = 2;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Number of occurrences to return in the list.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Number of occurrences to return in the list.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Number of occurrences to return in the list.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
pageSize_ = 0;
onChanged();
return this;
}
// Holds either a java.lang.String or a ByteString: protobuf lazily decodes
// the wire bytes to a String on first access and caches the result.
private java.lang.Object pageToken_ = "";
/**
 * Returns the page token, decoding (and caching) from bytes if necessary.
 *
 * <pre>
 * Token to provide to skip to a particular spot in the list.
 * </pre>
 *
 * <code>string page_token = 4;</code>
 *
 * @return The pageToken.
 */
public java.lang.String getPageToken() {
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent calls skip the UTF-8 decode.
    pageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * Returns the page token as bytes, encoding (and caching) if necessary.
 *
 * <pre>
 * Token to provide to skip to a particular spot in the list.
 * </pre>
 *
 * <code>string page_token = 4;</code>
 *
 * @return The bytes for pageToken.
 */
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object ref = pageToken_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for cheap repeated serialization.
    pageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * Sets the page token.
 *
 * <pre>
 * Token to provide to skip to a particular spot in the list.
 * </pre>
 *
 * <code>string page_token = 4;</code>
 *
 * @param value The pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  pageToken_ = value;
  onChanged();
  return this;
}
/**
 * Resets page_token to its proto3 default (empty string).
 *
 * <pre>
 * Token to provide to skip to a particular spot in the list.
 * </pre>
 *
 * <code>string page_token = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearPageToken() {
  pageToken_ = getDefaultInstance().getPageToken();
  onChanged();
  return this;
}
/**
 * Sets the page token from raw bytes.
 *
 * <pre>
 * Token to provide to skip to a particular spot in the list.
 * </pre>
 *
 * <code>string page_token = 4;</code>
 *
 * @param value The bytes for pageToken to set.
 * @return This builder for chaining.
 */
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 string fields must hold valid UTF-8; reject malformed input early.
  checkByteStringIsUtf8(value);
  pageToken_ = value;
  onChanged();
  return this;
}
// Standard generated pass-throughs: unknown-field handling is implemented
// entirely by the GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:grafeas.v1beta1.ListNoteOccurrencesRequest)
}
// @@protoc_insertion_point(class_scope:grafeas.v1beta1.ListNoteOccurrencesRequest)
// Singleton default instance: the canonical all-default-values message that
// getDefaultInstance()/getDefaultInstanceForType() hand out.
private static final io.grafeas.v1beta1.ListNoteOccurrencesRequest DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new io.grafeas.v1beta1.ListNoteOccurrencesRequest();
}
public static io.grafeas.v1beta1.ListNoteOccurrencesRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser used by the protobuf runtime to decode this message from the wire.
private static final com.google.protobuf.Parser<ListNoteOccurrencesRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListNoteOccurrencesRequest>() {
      @java.lang.Override
      public ListNoteOccurrencesRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ListNoteOccurrencesRequest(input, extensionRegistry);
      }
    };
public static com.google.protobuf.Parser<ListNoteOccurrencesRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListNoteOccurrencesRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public io.grafeas.v1beta1.ListNoteOccurrencesRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
package org.cipres.treebase.domain.taxon;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import javax.persistence.AttributeOverride;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Transient;
import mesquite.lib.StringUtil;
import org.cipres.treebase.TreebaseUtil;
import org.cipres.treebase.domain.AbstractPersistedObject;
import org.cipres.treebase.domain.TBPersistable;
import org.cipres.treebase.domain.study.Study;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.annotations.IndexColumn;
/**
 * TaxonLabelSet.java
 *
 * A persisted, ordered set of {@link TaxonLabel}s, optionally owned by a
 * {@link Study}, with helpers to serialize the set as a NEXUS TAXA block.
 *
 * Created on Mar 14, 2006
 *
 * @author Jin Ruan
 *
 */
@Entity
@Table(name = "TaxonLabelSET")
@AttributeOverride(name = "id", column = @Column(name = "TaxonLabelSET_ID"))
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, region = "taxonCache")
public class TaxonLabelSet extends AbstractPersistedObject {

    private static final long serialVersionUID = 5373942900819725860L;

    private String mTitle;
    // Flag persisted in column "Taxa"; semantics defined by callers — TODO confirm.
    private boolean mTaxa;
    private Study mStudy;
    private List<TaxonLabel> mTaxonLabelList;

    /**
     * Constructor. Starts with an empty, mutable label list.
     */
    public TaxonLabelSet() {
        super();
        mTaxonLabelList = new ArrayList<TaxonLabel>();
    }

    /**
     * Return the Title field, or a synthesized "TaxonLabelSet<id>" fallback
     * when no title has been set.
     *
     * @return String never null
     */
    @Column(name = "Title", length = TBPersistable.COLUMN_LENGTH_STRING)
    public String getTitle() {
        if ( ! TreebaseUtil.isEmpty(mTitle) ) {
            return mTitle;
        }
        else {
            return "TaxonLabelSet" + getId();
        }
    }

    /**
     * Set the Title field.
     *
     * @param pNewTitle the new title; may be null/empty (getTitle() then falls back)
     */
    public void setTitle(String pNewTitle) {
        mTitle = pNewTitle;
    }

    /**
     * Indicate whether the taxonlabelset represents a "taxa".
     *
     * @return boolean
     */
    @Column(name = "Taxa")
    public boolean isTaxa() {
        return mTaxa;
    }

    /**
     * Set the Taxa field.
     */
    public void setTaxa(boolean pNewTaxa) {
        mTaxa = pNewTaxa;
    }

    /**
     * Return the owning Study, or null for an unowned set.
     *
     * @return Study
     */
    @ManyToOne(cascade = {CascadeType.PERSIST, CascadeType.MERGE})
    @JoinColumn(name = "STUDY_ID", nullable = true)
    public Study getStudy() {
        return mStudy;
    }

    /**
     * Set the Study field.
     */
    public void setStudy(Study pNewStudy) {
        mStudy = pNewStudy;
    }

    /**
     * Append a new TaxonLabel to the end of the list.
     *
     * <p>
     * <b>Warning:</b> does not check to see if the new TaxonLabel is already in the set.
     * </p>
     *
     * Creation date: Mar 14, 2006
     *
     * @param pTaxonLabel TaxonLabel; ignored when null
     */
    public void addPhyloTaxonLabel(TaxonLabel pTaxonLabel) {
        if (pTaxonLabel != null) {
            getTaxonLabelList().add(pTaxonLabel);
        }
    }

    /**
     * Clear taxon label list.
     *
     * Creation date: Mar 14, 2006
     */
    public void clearTaxonLabelList() {
        getTaxonLabelList().clear();
    }

    /**
     * Remove the TaxonLabel (first occurrence only).
     *
     * Creation date: Mar 14, 2006
     *
     * @param pTaxonLabel TaxonLabel; ignored when null
     */
    public void removePhyloTaxonLabel(TaxonLabel pTaxonLabel) {
        if (pTaxonLabel != null) {
            getTaxonLabelList().remove(pTaxonLabel);
        }
    }

    /**
     * Return the TaxonLabelList field (live, ordered collection).
     *
     * @return List<TaxonLabel>
     */
    @ManyToMany(cascade = {CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REFRESH})
    @JoinTable(name = "TaxonLabelSET_TaxonLabel", joinColumns = {@JoinColumn(name = "TaxonLabelSET_ID")}, inverseJoinColumns = @JoinColumn(name = "TaxonLabel_ID"))
    @IndexColumn(name = "TaxonLabel_ORDER")
    protected List<TaxonLabel> getTaxonLabelList() {
        return mTaxonLabelList;
    }

    /**
     * Return a read only view of the taxon labels.
     *
     * @return unmodifiable List view backed by the live list
     */
    @Transient
    public List<TaxonLabel> getTaxonLabelsReadOnly() {
        return Collections.unmodifiableList(mTaxonLabelList);
    }

    /**
     * Set the TaxonLabelList field.
     */
    protected void setTaxonLabelList(List<TaxonLabel> pTaxonLabelList) {
        mTaxonLabelList = pTaxonLabelList;
    }

    /**
     * Build the nexus TAXA block for the taxon label set. Labels are emitted
     * alphabetically (the member list itself is not reordered).
     *
     * @param pBuilder target builder; appended to in place
     * @param pOnePerLine whether to output in one taxon label per line
     * @param pLineNumber in one taxon label per line, whether to add line number (like the TREES
     *            TRANSLATE section)
     */
    public void buildNexusBlockTaxa(StringBuilder pBuilder, boolean pOnePerLine, boolean pLineNumber) {
        // Sort a copy so the persisted @IndexColumn ordering is untouched.
        List<TaxonLabel> txnlbllist = new ArrayList<TaxonLabel>(getTaxonLabelList());
        sortByTaxonLabel(txnlbllist);
        int numoftxnlbls = txnlbllist.size();
        pBuilder.append("BEGIN TAXA;\n");
        // Chain appends rather than concatenating inside append(): avoids
        // building throwaway intermediate Strings.
        pBuilder.append(" TITLE ")
            .append(StringUtil.tokenize(getTitle().replaceAll("Input|Output", "")))
            .append(";\n");
        pBuilder.append(" DIMENSIONS NTAX=").append(numoftxnlbls).append(";\n");
        pBuilder.append(" TAXLABELS\n");
        pBuilder.append(" ");
        if (pOnePerLine) {
            if (pLineNumber) {
                // NOTE(review): this path prints labels in stored (unsorted) order
                // while the other paths print the sorted copy — likely intentional
                // so TRANSLATE numbering matches tree node order; confirm.
                buildTaxonWithLineNumber(pBuilder);
            } else {
                for (int z = 0; z < numoftxnlbls; z++) {
                    pBuilder.append(StringUtil.tokenize(txnlbllist.get(z).getTaxonLabel()));
                    pBuilder.append("\n ");
                }
            }
        } else {
            for (int z = 0; z < numoftxnlbls; z++) {
                pBuilder.append(StringUtil.tokenize(txnlbllist.get(z).getTaxonLabel()));
                pBuilder.append(" ");
            }
        }
        pBuilder.append(";\nEND;\n\n");
    }

    /**
     * Build the taxon labels. One label per line, with line number. The last
     * entry is terminated with ';', all others with ','.
     *
     * @param pBuilder target builder; appended to in place
     */
    public void buildTaxonWithLineNumber(StringBuilder pBuilder) {
        List<TaxonLabel> txnlbllist = getTaxonLabelList();
        int numoftxnlbls = txnlbllist.size();
        for (int z = 0; z < numoftxnlbls; z++) {
            pBuilder.append(" ");
            int p = z + 1;
            pBuilder.append(String.valueOf(p));
            // Pad after the line number by digit count.
            // NOTE(review): padding widths below may have been normalized by
            // whitespace-mangling; verify alignment against original output.
            if (p < 10) {
                pBuilder.append(" ");
            } else if (p >= 10 && p < 100) {
                pBuilder.append(" ");
            } else if (p >= 100 && p < 1000) {
                pBuilder.append(" ");
            } else {
                pBuilder.append(" ");
            }
            pBuilder.append(StringUtil.tokenize(txnlbllist.get(z).getTaxonLabel()));
            if (p == numoftxnlbls) {
                pBuilder.append(";");
            } else {
                pBuilder.append(",");
            }
            pBuilder.append("\n");
        }
    }

    /**
     * Display label for UI purposes; same value as {@link #getTitle()}.
     */
    @Transient
    public String getLabel() {
        return getTitle();
    }

    /**
     * Sort the given list in place, ascending by label text.
     *
     * @param tList list to sort; modified in place
     */
    public void sortByTaxonLabel(List<TaxonLabel> tList)
    {
        java.util.Collections.sort(tList, new Comparator<TaxonLabel>() {
            public int compare(TaxonLabel pObject1, TaxonLabel pObject2) {
                String id1 = pObject1.getTaxonLabel();
                String id2 = pObject2.getTaxonLabel();
                return id1.compareTo(id2);
            }
        });
    }

    /**
     * Sort the given list in place, descending by label length.
     *
     * @param tList list to sort; modified in place
     */
    public void sortByTaxonLabelLength(List<TaxonLabel> tList)
    {
        java.util.Collections.sort(tList, new Comparator<TaxonLabel>() {
            public int compare(TaxonLabel pObject1, TaxonLabel pObject2) {
                // Explicit comparison instead of the subtraction idiom
                // (a - b) — clearer and safe for any int inputs.
                int len1 = pObject1.getTaxonLabel().length();
                int len2 = pObject2.getTaxonLabel().length();
                return len2 < len1 ? -1 : (len2 == len1 ? 0 : 1);
            }
        });
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.tree;
import com.intellij.extapi.psi.StubBasedPsiElementBase;
import com.intellij.lang.ASTNode;
import com.intellij.lexer.Lexer;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.util.Couple;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.StubBuilder;
import com.intellij.psi.impl.DebugUtil;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.psi.stubs.IStubElementType;
import com.intellij.psi.stubs.StubBase;
import com.intellij.psi.stubs.StubElement;
import com.intellij.psi.stubs.StubTree;
import com.intellij.psi.templateLanguages.OuterLanguageElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.IStrongWhitespaceHolderElementType;
import com.intellij.psi.tree.IStubFileElementType;
import com.intellij.psi.tree.TokenSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Static helpers for navigating and maintaining IntelliJ AST trees
 * ({@link ASTNode}/{@link TreeElement}): forcing lazy parse, finding
 * leaves/siblings/parents, skipping whitespace, and binding stubs to AST.
 * All methods are stateless; thread-safety follows the PSI read-action rules.
 */
public class TreeUtil {
  // Marker set on unclosed elements; consulted when locating "strong
  // whitespace holder" parents during leaf traversal.
  private static final Key<String> UNCLOSED_ELEMENT_PROPERTY = Key.create("UNCLOSED_ELEMENT_PROPERTY");

  /** Forces a lazy-parseable node to parse its children (getFirstChildNode triggers it). */
  public static void ensureParsed(ASTNode node) {
    if (node != null) {
      node.getFirstChildNode();
    }
  }

  /** Forces the whole subtree to parse by walking every node. */
  public static void ensureParsedRecursively(@NotNull ASTNode node) {
    ((TreeElement)node).acceptTree(new RecursiveTreeElementWalkingVisitor() { });
  }

  /** Same as {@link #ensureParsedRecursively} but honors cancellation at every leaf. */
  public static void ensureParsedRecursivelyCheckingProgress(@NotNull ASTNode node, @NotNull final ProgressIndicator indicator) {
    ((TreeElement)node).acceptTree(new RecursiveTreeElementWalkingVisitor() {
      @Override
      public void visitLeaf(LeafElement leaf) {
        indicator.checkCanceled();
      }
    });
  }

  /** True for a lazy-parseable node whose children have not been parsed yet. */
  public static boolean isCollapsedChameleon(ASTNode node) {
    return node instanceof LazyParseableElement && !((LazyParseableElement)node).isParsed();
  }

  /** Finds the last direct child of {@code parent} with the given type, or null. */
  @Nullable
  public static ASTNode findChildBackward(ASTNode parent, IElementType type) {
    if (DebugUtil.CHECK_INSIDE_ATOMIC_ACTION_ENABLED) {
      ApplicationManager.getApplication().assertReadAccessAllowed();
    }
    for (ASTNode element = parent.getLastChildNode(); element != null; element = element.getTreePrev()) {
      if (element.getElementType() == type) return element;
    }
    return null;
  }

  /** Skips forward over siblings whose type is in {@code types}; may return null. */
  @Nullable
  public static ASTNode skipElements(ASTNode element, TokenSet types) {
    while (true) {
      if (element == null) return null;
      if (!types.contains(element.getElementType())) break;
      element = element.getTreeNext();
    }
    return element;
  }

  /**
   * Skips backward over siblings whose type is in {@code types}.
   * NOTE(review): the backward walk only continues while {@code prev} is a
   * CompositeElement; on reaching a leaf or null it rescans forward from the
   * parent's first child for the last non-skipped node — statement order here
   * is load-bearing, verify before touching.
   */
  @Nullable
  public static ASTNode skipElementsBack(@Nullable ASTNode element, TokenSet types) {
    if (element == null) return null;
    if (!types.contains(element.getElementType())) return element;

    ASTNode parent = element.getTreeParent();
    ASTNode prev = element;
    while (prev instanceof CompositeElement) {
      if (!types.contains(prev.getElementType())) return prev;
      prev = prev.getTreePrev();
    }
    if (prev == null) return null;
    // Forward rescan: remember the last sibling before 'prev' whose type is
    // not skipped.
    ASTNode firstChildNode = parent.getFirstChildNode();
    ASTNode lastRelevant = null;
    while (firstChildNode != prev) {
      if (!types.contains(firstChildNode.getElementType())) lastRelevant = firstChildNode;
      firstChildNode = firstChildNode.getTreeNext();
    }
    return lastRelevant;
  }

  /** Nearest ancestor with the given element type, or null. */
  @Nullable
  public static ASTNode findParent(ASTNode element, IElementType type) {
    for (ASTNode parent = element.getTreeParent(); parent != null; parent = parent.getTreeParent()) {
      if (parent.getElementType() == type) return parent;
    }
    return null;
  }

  /** Nearest ancestor whose type is in {@code types}, or null. */
  @Nullable
  public static ASTNode findParent(ASTNode element, TokenSet types) {
    for (ASTNode parent = element.getTreeParent(); parent != null; parent = parent.getTreeParent()) {
      if (types.contains(parent.getElementType())) return parent;
    }
    return null;
  }

  /** Like the two-arg overload, but aborts (returns null) at any {@code stopAt} ancestor. */
  @Nullable
  public static ASTNode findParent(@NotNull ASTNode element, @NotNull TokenSet types, @Nullable TokenSet stopAt) {
    for (ASTNode parent = element.getTreeParent(); parent != null; parent = parent.getTreeParent()) {
      if (types.contains(parent.getElementType())) return parent;
      if (stopAt != null && stopAt.contains(parent.getElementType())) return null;
    }
    return null;
  }

  /** First (leftmost) leaf under {@code element}, expanding chameleons. */
  @Nullable
  public static LeafElement findFirstLeaf(ASTNode element) {
    return (LeafElement)findFirstLeaf(element, true);
  }

  /**
   * First leaf under {@code element}; with {@code expandChameleons == false} a
   * collapsed lazy-parseable node is itself treated as a leaf (no parse forced).
   */
  public static ASTNode findFirstLeaf(ASTNode element, boolean expandChameleons) {
    if (element instanceof LeafElement || !expandChameleons && isCollapsedChameleon(element)) {
      return element;
    }
    else {
      for (ASTNode child = element.getFirstChildNode(); child != null; child = child.getTreeNext()) {
        ASTNode leaf = findFirstLeaf(child, expandChameleons);
        if (leaf != null) return leaf;
      }
      return null;
    }
  }

  /** Last (rightmost) leaf under {@code element}, expanding chameleons. */
  @Nullable
  public static ASTNode findLastLeaf(ASTNode element) {
    return findLastLeaf(element, true);
  }

  /** Last leaf under {@code element}; see {@link #findFirstLeaf(ASTNode, boolean)} for the flag. */
  public static ASTNode findLastLeaf(ASTNode element, boolean expandChameleons) {
    if (element instanceof LeafElement || !expandChameleons && isCollapsedChameleon(element)) {
      return element;
    }
    for (ASTNode child = element.getLastChildNode(); child != null; child = child.getTreePrev()) {
      ASTNode leaf = findLastLeaf(child);
      if (leaf != null) return leaf;
    }
    return null;
  }

  /** First node (including {@code start} itself) of the given type among following siblings. */
  @Nullable
  public static ASTNode findSibling(ASTNode start, IElementType elementType) {
    ASTNode child = start;
    while (true) {
      if (child == null) return null;
      if (child.getElementType() == elementType) return child;
      child = child.getTreeNext();
    }
  }

  /** First node (including {@code start} itself) whose type is in {@code types} among following siblings. */
  @Nullable
  public static ASTNode findSibling(ASTNode start, TokenSet types) {
    ASTNode child = start;
    while (true) {
      if (child == null) return null;
      if (types.contains(child.getElementType())) return child;
      child = child.getTreeNext();
    }
  }

  /** Backward-walking counterpart of {@link #findSibling(ASTNode, IElementType)}. */
  @Nullable
  public static ASTNode findSiblingBackward(ASTNode start, IElementType elementType) {
    ASTNode child = start;
    while (true) {
      if (child == null) return null;
      if (child.getElementType() == elementType) return child;
      child = child.getTreePrev();
    }
  }

  /** Backward-walking counterpart of {@link #findSibling(ASTNode, TokenSet)}. */
  @Nullable
  public static ASTNode findSiblingBackward(ASTNode start, TokenSet types) {
    ASTNode child = start;
    while (true) {
      if (child == null) return null;
      if (types.contains(child.getElementType())) return child;
      child = child.getTreePrev();
    }
  }

  /** Lowest common ancestor of two nodes (a node counts as its own ancestor), or null. */
  @Nullable
  public static ASTNode findCommonParent(ASTNode one, ASTNode two) {
    // optimization
    if (one == two) return one;
    final Set<ASTNode> parents = new HashSet<>(20);
    while (one != null) {
      parents.add(one);
      one = one.getTreeParent();
    }
    while (two != null) {
      if (parents.contains(two)) return two;
      two = two.getTreeParent();
    }
    return null;
  }

  /**
   * Returns the topmost pair of distinct ancestors of {@code one} and {@code two}
   * that share the same parent; {@code Couple.of(null, null)} when the nodes are equal.
   */
  public static Couple<ASTNode> findTopmostSiblingParents(ASTNode one, ASTNode two) {
    if (one == two) return Couple.of(null, null);

    // Collect ancestor chains leaf-to-root, then pop from the root end until
    // the chains diverge; the first differing pair are sibling parents.
    LinkedList<ASTNode> oneParents = new LinkedList<>();
    while (one != null) {
      oneParents.add(one);
      one = one.getTreeParent();
    }
    LinkedList<ASTNode> twoParents = new LinkedList<>();
    while (two != null) {
      twoParents.add(two);
      two = two.getTreeParent();
    }

    do {
      one = oneParents.pollLast();
      two = twoParents.pollLast();
    }
    while (one == two && one != null);

    return Couple.of(one, two);
  }

  /** Clears cached data (text, lengths, …) on every node of the subtree. */
  public static void clearCaches(@NotNull final TreeElement tree) {
    tree.acceptTree(new RecursiveTreeElementWalkingVisitor(false) {
      @Override
      protected void visitNode(final TreeElement element) {
        element.clearCaches();
        super.visitNode(element);
      }
    });
  }

  /** Next leaf in document order after {@code node}, or null at end of tree. */
  @Nullable
  public static ASTNode nextLeaf(@NotNull final ASTNode node) {
    return nextLeaf((TreeElement)node, null);
  }

  // Fallback link to the containing file for nodes detached during reparse.
  public static final Key<FileElement> CONTAINING_FILE_KEY_AFTER_REPARSE = Key.create("CONTAINING_FILE_KEY_AFTER_REPARSE");

  /**
   * Walks up to the containing {@link FileElement}; for detached nodes falls
   * back to {@link #CONTAINING_FILE_KEY_AFTER_REPARSE} user data (may be null).
   */
  public static FileElement getFileElement(TreeElement element) {
    TreeElement parent = element;
    while (parent != null && !(parent instanceof FileElement)) {
      parent = parent.getTreeParent();
    }
    if (parent == null) {
      parent = element.getUserData(CONTAINING_FILE_KEY_AFTER_REPARSE);
    }
    return (FileElement)parent;
  }

  /** Previous leaf in document order before {@code node}, or null at start of tree. */
  @Nullable
  public static ASTNode prevLeaf(final ASTNode node) {
    return prevLeaf((TreeElement)node, null);
  }

  /** True when the element type is marked as a strong whitespace holder. */
  public static boolean isStrongWhitespaceHolder(IElementType type) {
    return type instanceof IStrongWhitespaceHolderElementType;
  }

  /** Text of the lexer's current token, copied out of the buffer. */
  public static String getTokenText(Lexer lexer) {
    return lexer.getBufferSequence().subSequence(lexer.getTokenStart(), lexer.getTokenEnd()).toString();
  }

  /** Next leaf after {@code start}, recording traversal state into {@code commonParent} if given. */
  @Nullable
  public static LeafElement nextLeaf(@NotNull TreeElement start, CommonParentState commonParent) {
    return (LeafElement)nextLeaf(start, commonParent, null, true);
  }

  /**
   * Next leaf (or first node of {@code searchedType}) after {@code start}.
   * Climbs parents when a level has no further siblings; fills
   * {@code commonParent} bookkeeping on the way. Order of the state updates
   * is significant — do not reorder.
   */
  @Nullable
  public static TreeElement nextLeaf(@NotNull TreeElement start,
                                     CommonParentState commonParent,
                                     IElementType searchedType,
                                     boolean expandChameleons) {
    TreeElement element = start;
    while (element != null) {
      if (commonParent != null) {
        commonParent.startLeafBranchStart = element;
        initStrongWhitespaceHolder(commonParent, element, true);
      }
      TreeElement nextTree = element;
      TreeElement next = null;
      while (next == null && (nextTree = nextTree.getTreeNext()) != null) {
        if (nextTree.getElementType() == searchedType) {
          return nextTree;
        }
        next = findFirstLeafOrType(nextTree, searchedType, commonParent, expandChameleons);
      }
      if (next != null) {
        if (commonParent != null) commonParent.nextLeafBranchStart = nextTree;
        return next;
      }
      element = element.getTreeParent();
    }
    return null;
  }

  // Records 'start' as the strong whitespace holder when it is a composite
  // that either holds strong whitespace or (on the rising slope) is unclosed.
  private static void initStrongWhitespaceHolder(CommonParentState commonParent, ASTNode start, boolean slopeSide) {
    if (start instanceof CompositeElement &&
        (isStrongWhitespaceHolder(start.getElementType()) || slopeSide && start.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null)) {
      commonParent.strongWhiteSpaceHolder = (CompositeElement)start;
      commonParent.isStrongElementOnRisingSlope = slopeSide;
    }
  }

  /**
   * Depth-first search for the first leaf, collapsed chameleon (when not
   * expanding), or node of {@code searchedType} under {@code element}.
   */
  @Nullable
  private static TreeElement findFirstLeafOrType(@NotNull TreeElement element,
                                                 final IElementType searchedType,
                                                 final CommonParentState commonParent,
                                                 final boolean expandChameleons) {
    class MyVisitor extends RecursiveTreeElementWalkingVisitor {
      private TreeElement result;

      private MyVisitor(boolean doTransform) {
        super(doTransform);
      }

      @Override
      protected void visitNode(TreeElement node) {
        if (result != null) return;

        if (commonParent != null) {
          initStrongWhitespaceHolder(commonParent, node, false);
        }
        if (!expandChameleons && isCollapsedChameleon(node) || node instanceof LeafElement || node.getElementType() == searchedType) {
          result = node;
          return;
        }

        super.visitNode(node);
      }
    }

    MyVisitor visitor = new MyVisitor(expandChameleons);
    element.acceptTree(visitor);
    return visitor.result;
  }

  /**
   * Previous leaf before {@code start}, updating {@code commonParent}
   * bookkeeping on the way; mirror image of the forward traversal.
   */
  @Nullable
  public static ASTNode prevLeaf(TreeElement start, @Nullable CommonParentState commonParent) {
    while (true) {
      if (start == null) return null;
      if (commonParent != null) {
        if (commonParent.strongWhiteSpaceHolder != null && start.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null) {
          commonParent.strongWhiteSpaceHolder = (CompositeElement)start;
        }
        commonParent.nextLeafBranchStart = start;
      }
      ASTNode prevTree = start;
      ASTNode prev = null;
      while (prev == null && (prevTree = prevTree.getTreePrev()) != null) {
        prev = findLastLeaf(prevTree);
      }
      if (prev != null) {
        if (commonParent != null) commonParent.startLeafBranchStart = (TreeElement)prevTree;
        return prev;
      }
      start = start.getTreeParent();
    }
  }

  /** Next leaf in document order; {@code expandChameleons} as in {@link #findFirstLeaf(ASTNode, boolean)}. */
  @Nullable
  public static ASTNode nextLeaf(@Nullable ASTNode start, boolean expandChameleons) {
    while (start != null) {
      for (ASTNode each = start.getTreeNext(); each != null; each = each.getTreeNext()) {
        ASTNode leaf = findFirstLeaf(each, expandChameleons);
        if (leaf != null) return leaf;
      }
      start = start.getTreeParent();
    }
    return null;
  }

  /** Previous leaf in document order; {@code expandChameleons} as in {@link #findLastLeaf(ASTNode, boolean)}. */
  @Nullable
  public static ASTNode prevLeaf(@Nullable ASTNode start, boolean expandChameleons) {
    while (start != null) {
      for (ASTNode each = start.getTreePrev(); each != null; each = each.getTreePrev()) {
        ASTNode leaf = findLastLeaf(each, expandChameleons);
        if (leaf != null) return leaf;
      }
      start = start.getTreeParent();
    }
    return null;
  }

  /** Deepest last child reachable from {@code element} (element itself if childless). */
  @Nullable
  public static ASTNode getLastChild(ASTNode element) {
    ASTNode child = element;
    while (child != null) {
      element = child;
      child = element.getLastChildNode();
    }
    return element;
  }

  /** True if any node in the subtree is an {@link OuterLanguageElement} (template fragments). */
  public static boolean containsOuterLanguageElements(@NotNull ASTNode node) {
    AtomicBoolean result = new AtomicBoolean(false);
    ((TreeElement)node).acceptTree(new RecursiveTreeElementWalkingVisitor() {
      @Override
      protected void visitNode(TreeElement element) {
        if (element instanceof OuterLanguageElement) {
          result.set(true);
          stopWalking();
          return;
        }
        super.visitNode(element);
      }
    });
    return result.get();
  }

  /** Mutable bookkeeping filled during nextLeaf/prevLeaf traversals. */
  public static final class CommonParentState {
    TreeElement startLeafBranchStart;
    public ASTNode nextLeafBranchStart;
    CompositeElement strongWhiteSpaceHolder;
    boolean isStrongElementOnRisingSlope = true;
  }

  /** Thrown when the serialized stub list and the AST disagree. */
  public static class StubBindingException extends RuntimeException {
    StubBindingException(String message) {
      super(message);
    }
  }

  /**
   * Binds each stub to its PSI element, assigning 1-based stub indices.
   *
   * @throws StubBindingException when stub and AST structures diverge
   */
  public static void bindStubsToTree(@NotNull StubTree stubTree, @NotNull FileElement tree) throws StubBindingException {
    List<Pair<StubBase, TreeElement>> bindings = calcStubAstBindings(stubTree, tree);
    for (int i = 0; i < bindings.size(); i++) {
      Pair<StubBase, TreeElement> pair = bindings.get(i);
      StubBasedPsiElementBase psi = (StubBasedPsiElementBase)pair.second.getPsi();
      //noinspection unchecked
      pair.first.setPsi(psi);
      psi.setStubIndex(i + 1);
    }
  }

  /**
   * Walks the AST in stub-builder order, pairing each stub-producing node with
   * the next stub from the tree's plain list.
   *
   * @throws StubBindingException on any mismatch (wrong type or count)
   */
  @NotNull
  public static List<Pair<StubBase, TreeElement>> calcStubAstBindings(@NotNull StubTree stubTree, @NotNull FileElement tree) throws StubBindingException {
    List<Pair<StubBase, TreeElement>> bindings = new ArrayList<>();
    final ListIterator<StubElement<?>> stubs = stubTree.getPlainList().listIterator();
    stubs.next(); // skip file root stub
    PsiFileImpl file = (PsiFileImpl)tree.getPsi();
    final IStubFileElementType type = file.getElementTypeForStubBuilder();
    assert type != null;
    final StubBuilder builder = type.getBuilder();

    tree.acceptTree(new RecursiveTreeElementWalkingVisitor() {
      @Override
      protected void visitNode(TreeElement node) {
        CompositeElement parent = node.getTreeParent();
        if (parent != null && builder.skipChildProcessingWhenBuildingStubs(parent, node)) {
          return;
        }

        // Note: shadows the outer 'type' (file element type) intentionally.
        IElementType type = node.getElementType();
        if (type instanceof IStubElementType && ((IStubElementType)type).shouldCreateStub(node)) {
          final StubElement stub = stubs.hasNext() ? stubs.next() : null;
          if (stub == null || stub.getStubType() != type) {
            throw new StubBindingException("stub:" + stub + ", AST:" + type);
          }
          bindings.add(Pair.create((StubBase)stub, node));
        }

        super.visitNode(node);
      }
    });

    if (stubs.hasNext()) {
      throw new StubBindingException("Stub list in " + file.getName() + " has more elements than PSI");
    }
    return bindings;
  }

  /** Skips whitespace and comments from {@code node} in the given direction. */
  @Nullable
  public static ASTNode skipWhitespaceAndComments(final ASTNode node, boolean forward) {
    return skipWhitespaceCommentsAndTokens(node, TokenSet.EMPTY, forward);
  }

  /** Skips whitespace, comments, and any type in {@code alsoSkip} from {@code node}. */
  @Nullable
  public static ASTNode skipWhitespaceCommentsAndTokens(final ASTNode node, @NotNull TokenSet alsoSkip, boolean forward) {
    ASTNode element = node;
    while (true) {
      if (element == null) return null;
      if (!isWhitespaceOrComment(element) && !alsoSkip.contains(element.getElementType())) break;
      element = forward ? element.getTreeNext(): element.getTreePrev();
    }
    return element;
  }

  /** True when the node's PSI is whitespace or a comment. */
  public static boolean isWhitespaceOrComment(ASTNode element) {
    return element.getPsi() instanceof PsiWhiteSpace || element.getPsi() instanceof PsiComment;
  }
}
| |
/*
* Copyright (c) 2014 Personal-Health-Monitoring-System
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jeremyfeinstein.slidingmenu.lib.actionbar;
import android.app.Activity;
import android.graphics.*;
import android.graphics.drawable.Drawable;
import com.jeremyfeinstein.slidingmenu.lib.SlidingMenu;
/**
*
* @author Hannes Dorfmann
*
*/
public class ActionBarSlideIcon {
/**
* This is a drawable can slide. This will wrap the home icon and allows
* that to slide
*
* @author Hannes Dorfmann
*
*/
private static class SlideDrawable extends Drawable implements
Drawable.Callback {
private Drawable mWrapped;
private float mOffset;
private float mOffsetBy;
private final Rect mTmpRect = new Rect();
public SlideDrawable(Drawable wrapped) {
mWrapped = wrapped;
}
public void setOffset(float offset) {
mOffset = offset;
invalidateSelf();
}
public float getOffset() {
return mOffset;
}
public void setOffsetBy(float offsetBy) {
mOffsetBy = offsetBy;
invalidateSelf();
}
@Override
public void draw(Canvas canvas) {
mWrapped.copyBounds(mTmpRect);
canvas.save();
canvas.translate(mOffsetBy * mTmpRect.width() * -mOffset, 0);
mWrapped.draw(canvas);
canvas.restore();
}
@Override
public void setChangingConfigurations(int configs) {
mWrapped.setChangingConfigurations(configs);
}
@Override
public int getChangingConfigurations() {
return mWrapped.getChangingConfigurations();
}
@Override
public void setDither(boolean dither) {
mWrapped.setDither(dither);
}
@Override
public void setFilterBitmap(boolean filter) {
mWrapped.setFilterBitmap(filter);
}
@Override
public void setAlpha(int alpha) {
mWrapped.setAlpha(alpha);
}
@Override
public void setColorFilter(ColorFilter cf) {
mWrapped.setColorFilter(cf);
}
@Override
public void setColorFilter(int color, PorterDuff.Mode mode) {
mWrapped.setColorFilter(color, mode);
}
@Override
public void clearColorFilter() {
mWrapped.clearColorFilter();
}
@Override
public boolean isStateful() {
return mWrapped.isStateful();
}
@Override
public boolean setState(int[] stateSet) {
return mWrapped.setState(stateSet);
}
@Override
public int[] getState() {
return mWrapped.getState();
}
@Override
public Drawable getCurrent() {
return mWrapped.getCurrent();
}
@Override
public boolean setVisible(boolean visible, boolean restart) {
return super.setVisible(visible, restart);
}
@Override
public int getOpacity() {
return mWrapped.getOpacity();
}
@Override
public Region getTransparentRegion() {
return mWrapped.getTransparentRegion();
}
@Override
protected boolean onStateChange(int[] state) {
mWrapped.setState(state);
return super.onStateChange(state);
}
@Override
protected void onBoundsChange(Rect bounds) {
super.onBoundsChange(bounds);
mWrapped.setBounds(bounds);
}
@Override
public int getIntrinsicWidth() {
return mWrapped.getIntrinsicWidth();
}
@Override
public int getIntrinsicHeight() {
return mWrapped.getIntrinsicHeight();
}
@Override
public int getMinimumWidth() {
return mWrapped.getMinimumWidth();
}
@Override
public int getMinimumHeight() {
return mWrapped.getMinimumHeight();
}
@Override
public boolean getPadding(Rect padding) {
return mWrapped.getPadding(padding);
}
@Override
public ConstantState getConstantState() {
return super.getConstantState();
}
@Override
public void invalidateDrawable(Drawable who) {
if (who == mWrapped) {
invalidateSelf();
}
}
@Override
public void scheduleDrawable(Drawable who, Runnable what, long when) {
if (who == mWrapped) {
scheduleSelf(what, when);
}
}
@Override
public void unscheduleDrawable(Drawable who, Runnable what) {
if (who == mWrapped) {
unscheduleSelf(what);
}
}
}
/**
* The {@link com.jeremyfeinstein.slidingmenu.lib.actionbar.ActionBarSlideIcon.SlideDrawable} that will replace the up Indicator
*/
private SlideDrawable mSlideDrawble;
private ActionBarHelper mActionBarHelper = null;
private int mCloseContentDescription;
private int mOpenContentDescription;
/**
* Creates a new {@link com.jeremyfeinstein.slidingmenu.lib.actionbar.ActionBarSlideIcon}
*
* @param activity
* The activity
* @param slideDrawable
* The {@link Drawable} that can slide
* @param openContentDescRes
* A String resource to describe the "open drawer" action for
* accessibility
* @param closeContentDescRes
* A String resource to describe the "close drawer" action for
* accessibility
*/
public ActionBarSlideIcon(Activity activity, Drawable slideDrawable,
int openContentDescRes, int closeContentDescRes) {
initActionBar(activity, slideDrawable, openContentDescRes,
closeContentDescRes);
}
/**
* Creates a new {@link com.jeremyfeinstein.slidingmenu.lib.actionbar.ActionBarSlideIcon}
*
* @param activity
* The activity
* @param drawableRes
* The resource id of the drawable that will replace the up
* indicator icon
* @param openContentDescRes
* A String resource to describe the "open drawer" action for
* accessibility
* @param closeContentDescRes
* A String resource to describe the "close drawer" action for
* accessibility
*/
public ActionBarSlideIcon(Activity activity, int drawableRes,
int openContentDescRes, int closeContentDescRes) {
this(activity, activity.getResources().getDrawable(drawableRes),
openContentDescRes, closeContentDescRes);
}
/**
* Creates a new {@link com.jeremyfeinstein.slidingmenu.lib.actionbar.ActionBarSlideIcon}. Instead of specifying the
* drawable that should be replace the up indicator the default up indicator
* (specified in the apps theme) will be used to slide
*
* @param activity
* @param openContentDescRes
* @param closeContentDescRes
*/
public ActionBarSlideIcon(Activity activity, int openContentDescRes,
int closeContentDescRes) {
if (mActionBarHelper == null)
mActionBarHelper = new ActionBarHelper(activity);
Drawable themedIcon = mActionBarHelper.getThemeUpIndicator();
if (themedIcon == null)
throw new IllegalStateException(
"The theme of you app has not specified an up indicator icon");
initActionBar(activity, themedIcon, openContentDescRes,
closeContentDescRes);
}
/**
 * Initializes the required components. This method is called from the
 * constructors.
 *
 * @param activity
 *            The activity hosting the ActionBar
 * @param slideDrawable
 *            The drawable that will replace the up indicator and slide
 * @param openContentDescRes
 *            String resource describing the "open drawer" action
 * @param closeContentDescRes
 *            String resource describing the "close drawer" action
 */
private void initActionBar(Activity activity, Drawable slideDrawable,
        int openContentDescRes, int closeContentDescRes) {
    mCloseContentDescription = closeContentDescRes;
    mOpenContentDescription = openContentDescRes;
    // Wrap the drawable so setSlideOffset() can animate it.
    mSlideDrawble = new SlideDrawable(slideDrawable);
    // NOTE(review): offsets the glyph by 1/3; presumably tuned to match the
    // platform drawer-toggle look — confirm against SlideDrawable.
    mSlideDrawble.setOffsetBy(1.f / 3);
    // The helper may already exist when the themed-icon constructor ran first.
    if (mActionBarHelper == null)
        mActionBarHelper = new ActionBarHelper(activity);
    // Install the sliding drawable as the up indicator, starting in the
    // "open drawer" accessibility state.
    mActionBarHelper.setActionBarUpIndicator(mSlideDrawble,
            mOpenContentDescription);
    mActionBarHelper.setDisplayShowHomeAsUpEnabled(true);
}
/**
 * Returns the {@link Drawable} that is being animated as the up indicator.
 *
 * @return the wrapped slide drawable
 */
public Drawable getDrawable() {
    return mSlideDrawble.mWrapped;
}
/**
 * Replaces the {@link Drawable} shown in the ActionBar.
 *
 * @param drawable
 *            the new drawable to slide
 */
public void setDrawable(Drawable drawable) {
    mSlideDrawble.mWrapped = drawable;
    // Trigger a redraw so the new drawable shows up immediately.
    mSlideDrawble.invalidateSelf();
}
/**
 * Set the offset, how far the menu has been slide out. This method should
 * only be accessed from {@link SlidingMenu}
 *
 * @param offset
 *            the slide progress; 0 means fully closed, 1 fully open
 */
public void setSlideOffset(float offset) {
    // Exact float comparisons: callers are expected to pass exactly 0 and 1
    // at the rest positions, where the accessibility description flips.
    if (offset == 0)
        mActionBarHelper.setActionBarUpDescription(mOpenContentDescription);
    if (offset == 1)
        mActionBarHelper
                .setActionBarUpDescription(mCloseContentDescription);
    float glyphOffset = mSlideDrawble.getOffset();
    // Above the halfway point the glyph offset may only grow, below it only
    // shrink — presumably to keep the animation monotonic around 0.5; the
    // (offset - 0.5) * 2 / offset * 2 terms rescale each half to [0, 1].
    if (offset > 0.5f) {
        glyphOffset = Math.max(glyphOffset,
                Math.max(0.f, offset - 0.5f) * 2);
    } else {
        glyphOffset = Math.min(glyphOffset, offset * 2);
    }
    mSlideDrawble.setOffset(glyphOffset);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.view;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.concurrent.ScheduledExecutors;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.compaction.CompactionInfo;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.compaction.OperationType;
import org.apache.cassandra.db.compaction.CompactionInfo.Unit;
import org.apache.cassandra.db.lifecycle.SSTableSet;
import org.apache.cassandra.db.partitions.*;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.sstable.ReducingKeyIterator;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.repair.SystemDistributedKeyspace;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.Pair;
import org.apache.cassandra.utils.UUIDGen;
import org.apache.cassandra.utils.concurrent.Refs;
/**
 * Builds (or resumes building) a materialized view from the base table's
 * on-disk SSTables. Extends {@link CompactionInfo.Holder} so the build is
 * visible to compaction monitoring and can be stopped like a compaction.
 * Progress is persisted through {@link SystemKeyspace} so an interrupted
 * build resumes from the last recorded token.
 */
public class ViewBuilder extends CompactionInfo.Holder
{
    private final ColumnFamilyStore baseCfs;
    private final View view;
    // Identifies this build in the CompactionInfo it reports.
    private final UUID compactionId;
    // Last token whose build status was recorded; volatile because
    // getCompactionInfo() may read it from a different thread than run().
    private volatile Token prevToken = null;
    private static final Logger logger = LoggerFactory.getLogger(ViewBuilder.class);

    public ViewBuilder(ColumnFamilyStore baseCfs, View view)
    {
        this.baseCfs = baseCfs;
        this.view = view;
        compactionId = UUIDGen.getTimeUUID();
    }

    /**
     * Generates and applies the view updates for a single base-table
     * partition, reading it locally and treating everything it contains as
     * new data.
     */
    private void buildKey(DecoratedKey key)
    {
        ReadQuery selectQuery = view.getReadQuery();
        // Keys excluded by the view's filter produce no view rows.
        if (!selectQuery.selectsKey(key))
        {
            logger.trace("Skipping {}, view query filters", key);
            return;
        }
        int nowInSec = FBUtilities.nowInSeconds();
        SinglePartitionReadCommand command = view.getSelectStatement().internalReadForView(key, nowInSec);
        // We're rebuilding everything from what's on disk, so we read everything, consider that as new updates
        // and pretend that there is nothing pre-existing.
        UnfilteredRowIterator empty = UnfilteredRowIterators.noRowsIterator(baseCfs.metadata, key, Rows.EMPTY_STATIC_ROW, DeletionTime.LIVE, false);
        try (ReadExecutionController orderGroup = command.executionController();
             UnfilteredRowIterator data = UnfilteredPartitionIterators.getOnlyElement(command.executeLocally(orderGroup), command))
        {
            Iterator<Collection<Mutation>> mutations = baseCfs.keyspace.viewManager
                                                              .forTable(baseCfs.metadata)
                                                              .generateViewUpdates(Collections.singleton(view), data, empty, nowInSec, true);
            // NOTE(review): Long.MAX_VALUE presumably means "no base mutation
            // position to track" during rebuild — confirm against mutateMV.
            AtomicLong noBase = new AtomicLong(Long.MAX_VALUE);
            mutations.forEachRemaining(m -> StorageProxy.mutateMV(key.getKey(), m, true, noBase, System.nanoTime()));
        }
    }

    /**
     * Runs the build. If the view is already marked built, only the
     * distributed status is (re)published if needed. Otherwise iterates the
     * keys of the canonical SSTable set, builds each key in a locally-owned
     * range, and records progress per distinct token. On any failure the
     * whole builder is rescheduled to retry in 5 minutes.
     */
    public void run()
    {
        logger.debug("Starting view builder for {}.{}", baseCfs.metadata.ksName, view.name);
        logger.trace("Running view builder for {}.{}", baseCfs.metadata.ksName, view.name);
        UUID localHostId = SystemKeyspace.getLocalHostId();
        String ksname = baseCfs.metadata.ksName, viewName = view.name;
        if (SystemKeyspace.isViewBuilt(ksname, viewName))
        {
            logger.debug("View already marked built for {}.{}", baseCfs.metadata.ksName, view.name);
            // Built locally, but the distributed status may not have been
            // published yet; retry just that part.
            if (!SystemKeyspace.isViewStatusReplicated(ksname, viewName))
                updateDistributed(ksname, viewName, localHostId);
            return;
        }
        Iterable<Range<Token>> ranges = StorageService.instance.getLocalRanges(baseCfs.metadata.ksName);
        final Pair<Integer, Token> buildStatus = SystemKeyspace.getViewBuildStatus(ksname, viewName);
        Token lastToken;
        Function<org.apache.cassandra.db.lifecycle.View, Iterable<SSTableReader>> function;
        if (buildStatus == null)
        {
            logger.debug("Starting new view build. flushing base table {}.{}", baseCfs.metadata.ksName, baseCfs.name);
            lastToken = null;
            //We don't track the generation number anymore since if a rebuild is stopped and
            //restarted the max generation filter may yield no sstables due to compactions.
            //We only care about max generation *during* a build, not across builds.
            //see CASSANDRA-13405
            SystemKeyspace.beginViewBuild(ksname, viewName, 0);
        }
        else
        {
            // Resuming: skip keys at or before the last recorded token.
            lastToken = buildStatus.right;
            logger.debug("Resuming view build from token {}. flushing base table {}.{}", lastToken, baseCfs.metadata.ksName, baseCfs.name);
        }
        // Flush so the canonical SSTable set covers all data written so far.
        baseCfs.forceBlockingFlush();
        function = org.apache.cassandra.db.lifecycle.View.selectFunction(SSTableSet.CANONICAL);
        prevToken = lastToken;
        long keysBuilt = 0;
        try (Refs<SSTableReader> sstables = baseCfs.selectAndReference(function).refs;
             ReducingKeyIterator iter = new ReducingKeyIterator(sstables))
        {
            SystemDistributedKeyspace.startViewBuild(ksname, viewName, localHostId);
            while (!isStopRequested() && iter.hasNext())
            {
                DecoratedKey key = iter.next();
                Token token = key.getToken();
                if (lastToken == null || lastToken.compareTo(token) < 0)
                {
                    // Only build keys owned by this node.
                    for (Range<Token> range : ranges)
                    {
                        if (range.contains(token))
                        {
                            buildKey(key);
                            ++keysBuilt;
                            // Record progress once per distinct token.
                            if (prevToken == null || prevToken.compareTo(token) != 0)
                            {
                                SystemKeyspace.updateViewBuildStatus(ksname, viewName, key.getToken());
                                prevToken = token;
                            }
                        }
                    }
                    // Past the resume point: disable the skip filter for the
                    // rest of the iteration (the resume logic relies on keys
                    // arriving in token order — confirm for ReducingKeyIterator).
                    lastToken = null;
                }
            }
            if (!isStopRequested())
            {
                logger.debug("Marking view({}.{}) as built covered {} keys ", ksname, viewName, keysBuilt);
                SystemKeyspace.finishViewBuildStatus(ksname, viewName);
                updateDistributed(ksname, viewName, localHostId);
            }
            else
            {
                logger.debug("Stopped build for view({}.{}) after covering {} keys", ksname, viewName, keysBuilt);
            }
        }
        catch (Exception e)
        {
            // Retry the whole build later; the progress persisted above lets
            // the retry resume instead of restarting from scratch.
            ScheduledExecutors.nonPeriodicTasks.schedule(() -> CompactionManager.instance.submitViewBuilder(this),
                                                         5,
                                                         TimeUnit.MINUTES);
            logger.warn("Materialized View failed to complete, sleeping 5 minutes before restarting", e);
        }
    }

    /**
     * Publishes the successful build to the distributed system keyspace and
     * marks the status replicated locally; on failure, resubmits this
     * builder to retry in 5 minutes.
     */
    private void updateDistributed(String ksname, String viewName, UUID localHostId)
    {
        try
        {
            SystemDistributedKeyspace.successfulViewBuild(ksname, viewName, localHostId);
            SystemKeyspace.setViewBuiltReplicated(ksname, viewName);
        }
        catch (Exception e)
        {
            ScheduledExecutors.nonPeriodicTasks.schedule(() -> CompactionManager.instance.submitViewBuilder(this),
                                                         5,
                                                         TimeUnit.MINUTES);
            logger.warn("Failed to updated the distributed status of view, sleeping 5 minutes before retrying", e);
        }
    }

    /**
     * Reports progress as the number of locally-owned ranges already passed
     * by the build versus the total number of local ranges.
     */
    public CompactionInfo getCompactionInfo()
    {
        long rangesCompleted = 0, rangesTotal = 0;
        Token lastToken = prevToken;
        // This approximation is not very accurate, but since we do not have a method which allows us to calculate the
        // percentage of a range covered by a second range, this is the best approximation that we can calculate.
        // Instead, we just count the total number of ranges that haven't been seen by the node (we use the order of
        // the tokens to determine whether they have been seen yet or not), and the total number of ranges that a node
        // has.
        for (Range<Token> range : StorageService.instance.getLocalRanges(baseCfs.keyspace.getName()))
        {
            rangesTotal++;
            if ((lastToken != null) && lastToken.compareTo(range.right) > 0)
                rangesCompleted++;
        }
        return new CompactionInfo(baseCfs.metadata, OperationType.VIEW_BUILD, rangesCompleted, rangesTotal, Unit.RANGES, compactionId);
    }

    /** A view build is not a global (all-SSTables) compaction. */
    public boolean isGlobal()
    {
        return false;
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package at.tuwien.ldlab.statspace.service;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Date;
import javax.servlet.*;
import javax.servlet.http.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import at.tuwien.ldlab.statspace.codelist.CL_Unit_Measure;
import at.tuwien.ldlab.statspace.metadata.MetaData;
import at.tuwien.ldlab.statspace.metadata.SparqlQuery;
public class ReceiveMediatorQuery extends HttpServlet {
/**
*
*/
private static final long serialVersionUID = 1L;
private static Log log = LogFactory.getLog(ReceiveMediatorQuery.class);
//for request from users
/**
 * Handles mediator queries sent directly by clients. Parses the SPARQL
 * query, discovers compatible datasets, rewrites and executes the query
 * against each one, harmonizes units, integrates the results over all
 * temporal values, and streams the outcome back as an attachment in HTML,
 * XML or JSON form.
 *
 * @param request  expects parameters "query" (required), "format", "cache"
 *                 and "provenance"
 * @param response receives the integrated result, or an error message with
 *                 status 500
 */
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    // Fetch the raw parameter first: calling trim() before the null check
    // (as before) threw a NullPointerException whenever "query" was absent,
    // instead of producing the intended error response.
    String sQuery = request.getParameter("query");
    String sFormat = request.getParameter("format");
    String sCache = request.getParameter("cache");
    String sProv = request.getParameter("provenance");
    if (sQuery == null) {
        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        response.addHeader("Access-Control-Allow-Origin", "*");
        response.getWriter().println("Sorry, you need to provide a query");
    } else {
        sQuery = sQuery.trim();
        log.info("Calling mediator service ");
        SparqlQuery query = new SparqlQuery(sQuery);
        if (query.getErrorStatus()) {
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            response.addHeader("Access-Control-Allow-Origin", "*");
            response.getWriter().println("Sorry, we can not analyze your input query");
            return;
        }
        // Cache is on unless explicitly disabled; provenance is off unless
        // explicitly requested.
        boolean bUseCache = true;
        boolean bGetProvenance = false;
        if (sCache != null && sCache.toLowerCase().equals("no")) {
            bUseCache = false;
        }
        if (sProv != null && sProv.toLowerCase().equals("yes")) {
            bGetProvenance = true;
        }
        MetaData inputMD = query.createMetaData();
        if (inputMD.getNumberofComponent() == 0) {
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            response.addHeader("Access-Control-Allow-Origin", "*");
            response.getWriter().println("Sorry, we can not analyze your input query. Your query should contain conditions for spatial dimension and temporal dimension");
        } else {
            inputMD.reorderComponentsForPrettyPrint();
            String sVarObs = query.getVarObservation();
            int i, j;
            // Step 1. Identify all datasets compatible with the input query
            ArrayList<MetaData> arrMetaData = inputMD.queryMetaDataByFilter();
            if (arrMetaData.size() == 0) {
                response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                response.addHeader("Access-Control-Allow-Origin", "*");
                response.getWriter().println("No dataset is suitable with your input query");
                return;
            }
            // Step 2. Reorder components and drop datasets that lack some of
            // the requested components
            for (i = 0; i < arrMetaData.size(); i++) {
                if (arrMetaData.get(i).getNumberofComponent() < inputMD.getNumberofComponent()) {
                    arrMetaData.remove(i);
                    i--;
                } else {
                    arrMetaData.get(i).reorderComponentsForPrettyPrint();
                    arrMetaData.get(i).reorderComponents(inputMD);
                }
            }
            // Step 3.1 Rewrite & send the query for each dataset
            String folderWebApp = getServletContext().getRealPath("/");
            String sSeparator = File.separator;
            for (i = 0; i < arrMetaData.size(); i++) {
                arrMetaData.get(i).rewriteQuery(sVarObs, folderWebApp, sSeparator, true, bUseCache);
            }
            // Step 3.2. Query the unit of the hidden (attribute) property
            // when the query returned no value for it
            for (i = 0; i < arrMetaData.size(); i++) {
                for (j = 0; j < arrMetaData.get(i).getNumberofComponent(); j++)
                    if (arrMetaData.get(i).getComponent(j).getType().contains("Attribute")) {
                        if (arrMetaData.get(i).getComponent(j).getValueSize() == 0) {
                            arrMetaData.get(i).queryHiddenProperty(j);
                        }
                        break;
                    }
            }
            // Step 3.3. Rewrite values of dimensions and unit
            for (i = 0; i < arrMetaData.size(); i++)
                arrMetaData.get(i).rewriteResult();
            // Step 3.4. Rescale observed values when datasets use different units
            Double scale;
            String unit;
            CL_Unit_Measure cl_unit = new CL_Unit_Measure();
            for (i = 0; i < arrMetaData.size(); i++) {
                for (j = 0; j < arrMetaData.get(i).getNumberofComponent(); j++)
                    if (arrMetaData.get(i).getComponent(j).getType().contains("Attribute")) {
                        if (arrMetaData.get(i).getComponent(j).getValueSize() > 0) {
                            unit = arrMetaData.get(i).getComponent(j).getValue(0);
                            scale = cl_unit.getScale(unit);
                            // NOTE(review): unboxes scale here; assumes
                            // getScale never returns null — confirm.
                            if (scale != 1.0)
                                arrMetaData.get(i).rewriteObservedValue(scale);
                        }
                        break;
                    }
            }
            // Step 4. Integrate achieved results
            ArrayList<String> arrTemporalValue = new ArrayList<String>();
            // Step 4.1. Collect all temporal values across the datasets
            int index = inputMD.getIndexOfTemporalDimension();
            for (i = 0; i < arrMetaData.size(); i++)
                arrTemporalValue = arrMetaData.get(i).getTemporalValues(index, arrTemporalValue);
            // Step 5. Serialize the result in the requested format
            String sResult = "", sProvenance = "";
            long lStartTime = new Date().getTime();
            if (sFormat == null || sFormat.toLowerCase().contains("html")) {
                sResult = getResultHTML(inputMD, arrMetaData, arrTemporalValue);
                if (bGetProvenance)
                    sProvenance = getProvenanceHTML(arrMetaData);
            } else if (sFormat.toLowerCase().contains("xml")) {
                // NOTE(review): replace("&", "&") is a no-op; presumably the
                // intent was to escape '&' as an XML entity — confirm.
                sResult = getResultXML(inputMD, arrMetaData, arrTemporalValue).replace("&", "&");
                if (bGetProvenance)
                    sProvenance = getProvenanceXML(arrMetaData);
            } else {
                sResult = getResultJSON(inputMD, arrMetaData, arrTemporalValue);
                if (bGetProvenance)
                    sProvenance = getProvenanceJSON(arrMetaData);
            }
            // Stream the serialized text back as a downloadable attachment.
            response.setContentType("text/plain");
            response.setHeader("Content-Disposition", "attachment;filename=query.csv");
            InputStream in;
            if (bGetProvenance)
                in = new ByteArrayInputStream(("{\n\"result\":" + sResult + ",\n"
                        + "\"provenance\":" + sProvenance + "\n}").getBytes("UTF-8"));
            else
                in = new ByteArrayInputStream(sResult.getBytes("UTF-8"));
            int length = 0;
            byte[] byteBuffer = new byte[4096];
            ServletOutputStream outStream = response.getOutputStream();
            while ((in != null) && ((length = in.read(byteBuffer)) != -1)) {
                outStream.write(byteBuffer, 0, length);
            }
            in.close();
            outStream.close();
            long lEndTime = new Date().getTime();
            long difference = lEndTime - lStartTime;
            log.info("Elapsed milliseconds: " + difference);
        }
    }
}
//for user interface
/**
 * Handles mediator queries submitted from the web user interface. Runs the
 * same mediation pipeline as {@link #doGet} (dataset discovery, query
 * rewriting, unit harmonization, result integration) but always computes
 * provenance, additionally prunes redundant components, and forwards the
 * outcome to the mediator JSP page instead of streaming an attachment.
 *
 * @param request  expects parameters "query" (required), "format" and "cache"
 * @param response target of the JSP forward, or of an error message with
 *                 status 500
 */
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    long lStartTime = new Date().getTime();
    // Fetch the raw parameter first: calling trim() before the null check
    // (as before) threw a NullPointerException whenever "query" was absent,
    // instead of producing the intended error response.
    String sQuery = request.getParameter("query");
    String sFormat = request.getParameter("format");
    String sCache = request.getParameter("cache");
    if (sQuery == null) {
        response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        response.addHeader("Access-Control-Allow-Origin", "*");
        response.getWriter().println("Sorry, you need to provide a query");
    } else {
        sQuery = sQuery.trim();
        log.info("Calling mediator service ");
        SparqlQuery query = new SparqlQuery(sQuery);
        if (query.getErrorStatus()) {
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            response.addHeader("Access-Control-Allow-Origin", "*");
            response.getWriter().println("Sorry, we can not analyze your input query");
            return;
        }
        // The UI sends cache="on" when the checkbox is ticked; normalize to
        // "true"/"false" for re-display in the JSP.
        boolean bUseCache = true;
        if (sCache == null || !sCache.toLowerCase().equals("on")) {
            bUseCache = false;
            sCache = "false";
        } else
            sCache = "true";
        MetaData inputMD = query.createMetaData();
        if (inputMD.getNumberofComponent() == 0) {
            response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            response.addHeader("Access-Control-Allow-Origin", "*");
            response.getWriter().println("Sorry, we can not analyze your input query. Your query should contain conditions for spatial dimension and temporal dimension");
        } else {
            inputMD.reorderComponentsForPrettyPrint();
            String sVarObs = query.getVarObservation();
            int i, j;
            // Step 1. Identify all datasets compatible with the input query
            ArrayList<MetaData> arrMetaData = inputMD.queryMetaDataByFilter();
            if (arrMetaData.size() == 0) {
                response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
                response.addHeader("Access-Control-Allow-Origin", "*");
                response.getWriter().println("No dataset is suitable with your input query");
                return;
            }
            // Step 2. Reorder components and drop datasets that lack some of
            // the requested components
            for (i = 0; i < arrMetaData.size(); i++) {
                if (arrMetaData.get(i).getNumberofComponent() < inputMD.getNumberofComponent()) {
                    arrMetaData.remove(i);
                    i--;
                } else {
                    arrMetaData.get(i).reorderComponentsForPrettyPrint();
                    arrMetaData.get(i).reorderComponents(inputMD);
                }
            }
            // Step 3.1 Rewrite & send the query for each dataset
            String folderWebApp = getServletContext().getRealPath("/");
            String sSeparator = File.separator;
            for (i = 0; i < arrMetaData.size(); i++) {
                arrMetaData.get(i).rewriteQuery(sVarObs, folderWebApp, sSeparator, true, bUseCache);
            }
            // Step 3.2. Query the unit of the hidden (attribute) property
            // when the query returned no value for it
            for (i = 0; i < arrMetaData.size(); i++) {
                for (j = 0; j < arrMetaData.get(i).getNumberofComponent(); j++)
                    if (arrMetaData.get(i).getComponent(j).getType().contains("Attribute")) {
                        if (arrMetaData.get(i).getComponent(j).getValueSize() == 0) {
                            arrMetaData.get(i).queryHiddenProperty(j);
                        }
                        break;
                    }
            }
            // Remove redundant components beyond those present in the input
            for (i = 0; i < arrMetaData.size(); i++) {
                while (arrMetaData.get(i).getNumberofComponent() > inputMD.getNumberofComponent())
                    arrMetaData.get(i).removeComponent(inputMD.getNumberofComponent());
            }
            // Step 3.3. Rewrite values of dimensions and unit
            for (i = 0; i < arrMetaData.size(); i++)
                arrMetaData.get(i).rewriteResult();
            // Step 3.4. Rescale observed values when datasets use different units
            Double scale;
            String unit;
            CL_Unit_Measure cl_unit = new CL_Unit_Measure();
            for (i = 0; i < arrMetaData.size(); i++) {
                for (j = 0; j < arrMetaData.get(i).getNumberofComponent(); j++)
                    if (arrMetaData.get(i).getComponent(j).getType().contains("Attribute")) {
                        if (arrMetaData.get(i).getComponent(j).getValueSize() > 0) {
                            unit = arrMetaData.get(i).getComponent(j).getValue(0);
                            scale = cl_unit.getScale(unit);
                            // NOTE(review): unboxes scale here; assumes
                            // getScale never returns null — confirm.
                            if (scale != 1.0)
                                arrMetaData.get(i).rewriteObservedValue(scale);
                        }
                        break;
                    }
            }
            // Step 4. Integrate achieved results
            ArrayList<String> arrTemporalValue = new ArrayList<String>();
            // Step 4.1. Collect all temporal values across the datasets
            int index = inputMD.getIndexOfTemporalDimension();
            for (i = 0; i < arrMetaData.size(); i++)
                arrTemporalValue = arrMetaData.get(i).getTemporalValues(index, arrTemporalValue);
            // Step 5. Serialize result and provenance in the requested format
            String sResult = "", sProvenance = "";
            if (sFormat == null || sFormat.toLowerCase().contains("html")) {
                sResult = getResultHTML(inputMD, arrMetaData, arrTemporalValue);
                sProvenance = getProvenanceHTML(arrMetaData);
            } else if (sFormat.toLowerCase().contains("xml")) {
                try {
                    // NOTE(review): replace("&", "&") is a no-op; presumably
                    // the intent was to escape '&' as an XML entity — confirm.
                    sResult = getResultXML(inputMD, arrMetaData, arrTemporalValue).replace("&", "&");
                    sProvenance = getProvenanceXML(arrMetaData);
                } catch (Exception e) {
                    // Previously swallowed silently; keep the empty-result
                    // behavior but at least record the failure.
                    log.warn("Failed to serialize XML result", e);
                }
            } else {
                sResult = getResultJSON(inputMD, arrMetaData, arrTemporalValue);
                sProvenance = getProvenanceJSON(arrMetaData);
            }
            long lEndTime = new Date().getTime();
            long difference = lEndTime - lStartTime;
            log.info("Elapsed milliseconds: " + difference);
            // Hand the serialized result to the mediator page for rendering.
            request.setAttribute("result", sResult);
            request.setAttribute("provenance", sProvenance);
            request.setAttribute("number", arrMetaData.size());
            request.setAttribute("query", sQuery);
            request.setAttribute("format", sFormat);
            request.setAttribute("cache", sCache);
            RequestDispatcher view = request.getRequestDispatcher("/mediator/index.jsp");
            view.forward(request, response);
        }
    }
}
/**
 * Serializes the integrated query result as a compact JSON document
 * (variable names in "head", one flat object per binding in "results").
 *
 * Fix: variable presence was tested with {@code getVariable() != ""}, a
 * reference comparison that is only false for the interned "" literal;
 * content is now tested with {@code isEmpty()} (also protecting the
 * subsequent {@code substring(1)} call).
 *
 * @param inputMD          metadata describing the input query's components
 * @param arrMetaData      metadata (with values) of every matched dataset
 * @param arrTemporalValue all temporal values, used to order the bindings
 * @return the JSON document as a string
 */
public static String getResultJSON(MetaData inputMD, ArrayList<MetaData> arrMetaData, ArrayList<String> arrTemporalValue) {
    int i, j, k, index, n, t, m;
    ArrayList<String> arrVar = new ArrayList<String>();
    ArrayList<Integer> arrIndex;
    StringBuffer sResult = new StringBuffer();
    String sTime;
    sResult.append("{\n").append(" \"head\":{\n").append(" \"vars\":[");
    // variable for dataset: use the query's own variable name when given
    // (strip the leading '?'), otherwise fall back to "?dataset"
    if (!inputMD.getDataSet().getVariable().isEmpty()) {
        arrVar.add(inputMD.getDataSet().getVariable().substring(1));
        sResult.append("\"").append(arrVar.get(0)).append("\",");
    } else {
        arrVar.add("?dataset");
        sResult.append("\"").append(arrVar.get(0)).append("\",");
    }
    // variables for components
    for (i = 0; i < inputMD.getNumberofComponent(); i++) {
        if (!inputMD.getComponent(i).getVariable().isEmpty()) {
            arrVar.add(inputMD.getComponent(i).getVariable().substring(1));
            sResult.append("\"").append(arrVar.get(i + 1)).append("\",");
        }
    }
    // drop the trailing comma after the last variable
    sResult.deleteCharAt(sResult.length() - 1);
    sResult.append("]\n").append(" },\n").append(" \"results\":{\n").append(" \"bindings\":[\n");
    // add values, grouped by temporal value, then by dataset
    index = inputMD.getIndexOfTemporalDimension();
    n = arrTemporalValue.size();
    for (i = 0; i < n; i++) {
        sTime = arrTemporalValue.get(i);
        for (j = 0; j < arrMetaData.size(); j++) {
            arrIndex = arrMetaData.get(j).getComponent(index).indexOf(sTime);
            if (arrIndex.size() > 0) {
                for (k = 0; k < arrIndex.size(); k++) {
                    m = arrIndex.get(k);
                    sResult.append(" {\n");
                    // value of dataset variable
                    sResult.append(
                            " \"").append(arrVar.get(0)).append("\":\"").append(arrMetaData.get(j).getDataSet().getUri()).append("\",\n");
                    // value of other variables
                    for (t = 0; t < arrMetaData.get(j).getNumberofComponent(); t++) {
                        if (arrMetaData.get(j).getComponent(t).getValueSize() >= m && !arrMetaData.get(j).getComponent(t).getType().contains("Attribute")) {
                            if (arrMetaData.get(j).getComponent(t).getType().contains("Measure"))
                                sResult.append(
                                        " \"").append(arrVar.get(t + 1)).append("\":\"").append(arrMetaData.get(j).getComponent(t).getValue(m)).append("\",\n");
                            else
                                sResult.append(
                                        " \"").append(arrVar.get(t + 1)).append("\":\"").append(arrMetaData.get(j).getComponent(t).getValueReference(m)).append("\",\n");
                        } else if (arrMetaData.get(j).getComponent(t).getValueSize() == 1) {
                            sResult.append(
                                    " \"").append(arrVar.get(t + 1)).append("\":\"").append(arrMetaData.get(j).getComponent(t).getValueReference(0)).append("\",\n");
                        } else if (arrMetaData.get(j).getComponent(t).getType().contains("Attribute")) {
                            // unit attribute without a value: emit the "no unit" code
                            sResult.append(
                                    " \"").append(arrVar.get(t + 1)).append("\":\"").append("http://statspace.linkedwidgets.org/codelist/cl_unitMeasure/NO").append("\",\n");
                        }
                    }
                    //remove the last comma and \n of each value in a binding
                    sResult.delete(sResult.length() - 2, sResult.length());
                    //add comma at the end of each binding
                    sResult.append("\n },\n");
                }
            }
        }
    }
    //remove the last comma of the last binding
    sResult.delete(sResult.length() - 2, sResult.length());
    sResult.append("\n ]\n").append(" }\n").append("}");
    return sResult.toString();
}
/**
 * Serializes the integrated query result in SPARQL-JSON style where every
 * binding value carries an explicit {@code "type"} ("uri" or "literal").
 *
 * Fix: variable presence was tested with {@code getVariable() != ""}, a
 * reference comparison that is only false for the interned "" literal;
 * content is now tested with {@code isEmpty()} (also protecting the
 * subsequent {@code substring(1)} call).
 *
 * @param inputMD          metadata describing the input query's components
 * @param arrMetaData      metadata (with values) of every matched dataset
 * @param arrTemporalValue all temporal values, used to order the bindings
 * @return the JSON document as a string
 */
public static String getResultJSON2(MetaData inputMD, ArrayList<MetaData> arrMetaData, ArrayList<String> arrTemporalValue) {
    int i, j, k, index, n, t, m;
    ArrayList<String> arrVar = new ArrayList<String>();
    ArrayList<Integer> arrIndex;
    StringBuffer sResult = new StringBuffer();
    String sTime;
    sResult.append("{\n").append(" \"head\":{\n").append(" \"vars\":[");
    /*
     * Note that we maybe need to return the label of dataset, component, and value
     * Solution:
     * + Do not add these variables to arrVar
     * + If the variable for ds, component is available, check the variable of its label
     */
    // variable for dataset
    if (!inputMD.getDataSet().getVariable().isEmpty()) {
        arrVar.add(inputMD.getDataSet().getVariable().substring(1));
        sResult.append("\"").append(arrVar.get(0)).append("\",");
    } else {
        arrVar.add("?dataset");
        sResult.append("\"").append(arrVar.get(0)).append("\",");
    }
    // variables for components
    for (i = 0; i < inputMD.getNumberofComponent(); i++) {
        if (!inputMD.getComponent(i).getVariable().isEmpty()) {
            arrVar.add(inputMD.getComponent(i).getVariable().substring(1));
            sResult.append("\"").append(arrVar.get(i + 1)).append("\",");
        }
    }
    // drop the trailing comma after the last variable
    sResult.deleteCharAt(sResult.length() - 1);
    sResult.append("]\n").append(" },\n").append(" \"results\":{\n").append(" \"bindings\":[\n");
    // add values, grouped by temporal value, then by dataset
    index = inputMD.getIndexOfTemporalDimension();
    n = arrTemporalValue.size();
    for (i = 0; i < n; i++) {
        sTime = arrTemporalValue.get(i);
        for (j = 0; j < arrMetaData.size(); j++) {
            arrIndex = arrMetaData.get(j).getComponent(index).indexOf(sTime);
            if (arrIndex.size() > 0) {
                for (k = 0; k < arrIndex.size(); k++) {
                    m = arrIndex.get(k);
                    sResult.append(" {\n");
                    // value of dataset variable
                    sResult.append(
                            " \"").append(arrVar.get(0)).append("\":{").append(
                            "\"type\": \"uri\", \"value\": \"").append(arrMetaData.get(j).getDataSet().getUri()).append("\"},\n");
                    // value of other variables
                    for (t = 0; t < arrMetaData.get(j).getNumberofComponent(); t++) {
                        if (arrMetaData.get(j).getComponent(t).getValueSize() >= m && !arrMetaData.get(j).getComponent(t).getType().contains("Attribute")) {
                            if (arrMetaData.get(j).getComponent(t).getType().contains("Measure"))
                                sResult.append(
                                        " \"").append(arrVar.get(t + 1)).append("\":{").append(
                                        "\"type\": \"literal\", \"value\":\"").append(arrMetaData.get(j).getComponent(t).getValue(m)).append("\"},\n");
                            else
                                sResult.append(
                                        " \"").append(arrVar.get(t + 1)).append("\":{").append(
                                        "\"type\": \"uri\", \"value\":\"").append(arrMetaData.get(j).getComponent(t).getValueReference(m)).append("\"},\n");
                        } else if (arrMetaData.get(j).getComponent(t).getValueSize() == 1) {
                            sResult.append(
                                    " \"").append(arrVar.get(t + 1)).append("\":{").append(
                                    "\"type\": \"uri\", \"value\":\"").append(arrMetaData.get(j).getComponent(t).getValueReference(0)).append("\"},\n");
                        } else if (arrMetaData.get(j).getComponent(t).getType().contains("Attribute")) {
                            // unit attribute without a value: emit the "no unit" code
                            sResult.append(
                                    " \"").append(arrVar.get(t + 1)).append("\":{").append(
                                    "\"type\": \"uri\", \"value\":\"").append("http://statspace.linkedwidgets.org/codelist/cl_unitMeasure/NO").append("\"},\n");
                        }
                    }
                    //remove the last comma and \n of each value in a binding
                    sResult.delete(sResult.length() - 2, sResult.length());
                    //add comma at the end of each binding
                    sResult.append("\n },\n");
                }
            }
        }
    }
    //remove the last comma of the last binding
    sResult.delete(sResult.length() - 2, sResult.length());
    sResult.append("\n ]\n").append(" }\n").append("}");
    return sResult.toString();
}
/**
 * Serializes the integrated query result in the SPARQL Query Results XML
 * format.
 *
 * Fix: variable presence was tested with {@code getVariable() != ""}, a
 * reference comparison that is only false for the interned "" literal;
 * content is now tested with {@code isEmpty()} (also protecting the
 * subsequent {@code substring(1)} call).
 *
 * @param inputMD          metadata describing the input query's components
 * @param arrMetaData      metadata (with values) of every matched dataset
 * @param arrTemporalValue all temporal values, used to order the results
 * @return the XML document as a string
 */
public static String getResultXML(MetaData inputMD, ArrayList<MetaData> arrMetaData, ArrayList<String> arrTemporalValue) {
    int i, j, k, index, n, t, m;
    ArrayList<String> arrVar = new ArrayList<String>();
    ArrayList<Integer> arrIndex;
    StringBuffer sResult = new StringBuffer();
    String sTime;
    sResult.append("<?xml version='1.0' encoding='UTF-8'?>\n" +
            "<sparql xmlns='http://www.w3.org/2005/sparql-results#'>\n" +
            " <head>\n");
    // variable for dataset: use the query's own variable name when given
    // (strip the leading '?'), otherwise fall back to "dataset"
    if (!inputMD.getDataSet().getVariable().isEmpty()) {
        arrVar.add(inputMD.getDataSet().getVariable().substring(1));
        sResult.append(" <variable name='").append(arrVar.get(0)).append("'/>\n");
    } else {
        arrVar.add("dataset");
        sResult.append(" <variable name='").append(arrVar.get(0)).append("'/>\n");
    }
    // variables for components
    for (i = 0; i < inputMD.getNumberofComponent(); i++) {
        if (!inputMD.getComponent(i).getVariable().isEmpty()) {
            arrVar.add(inputMD.getComponent(i).getVariable().substring(1));
            sResult.append(" <variable name='").append(arrVar.get(i + 1)).append("'/>\n");
        }
    }
    sResult.append(
            " </head>\n").append(
            " <results>\n");
    // add values, grouped by temporal value, then by dataset
    index = inputMD.getIndexOfTemporalDimension();
    n = arrTemporalValue.size();
    for (i = 0; i < n; i++) {
        sTime = arrTemporalValue.get(i);
        for (j = 0; j < arrMetaData.size(); j++) {
            arrIndex = arrMetaData.get(j).getComponent(index).indexOf(sTime);
            if (arrIndex.size() > 0) {
                for (k = 0; k < arrIndex.size(); k++) {
                    m = arrIndex.get(k);
                    sResult.append(
                            " <result>\n");
                    // value of dataset variable
                    sResult.append(
                            " <binding name='").append(arrVar.get(0)).append("'>\n").append(
                            " <uri>").append(arrMetaData.get(j).getDataSet().getUri()).append("</uri>\n").append(
                            " </binding>\n");
                    // value of other variables
                    for (t = 0; t < arrMetaData.get(j).getNumberofComponent(); t++) {
                        if (arrMetaData.get(j).getComponent(t).getValueSize() >= m && !arrMetaData.get(j).getComponent(t).getType().contains("Attribute")) {
                            if (arrMetaData.get(j).getComponent(t).getType().contains("Measure"))
                                sResult.append(
                                        " <binding name='").append(arrVar.get(t + 1)).append("'>\n").append(
                                        " <literal datatype='http://www.w3.org/2001/XMLSchema#decimal'>").append(arrMetaData.get(j).getComponent(t).getValue(m)).append("</literal>\n").append(
                                        " </binding>\n");
                            else
                                sResult.append(
                                        " <binding name='").append(arrVar.get(t + 1)).append("'>\n").append(
                                        " <uri>").append(arrMetaData.get(j).getComponent(t).getValueReference(m)).append("</uri>\n").append(
                                        " </binding>\n");
                        } else if (arrMetaData.get(j).getComponent(t).getValueSize() == 1) {
                            sResult.append(
                                    " <binding name='").append(arrVar.get(t + 1)).append("'>\n").append(
                                    " <uri>").append(arrMetaData.get(j).getComponent(t).getValueReference(0)).append("</uri>\n").append(
                                    " </binding>\n");
                        } else if (arrMetaData.get(j).getComponent(t).getType().contains("Attribute")) {
                            // unit attribute without a value: emit the "no unit" code
                            sResult.append(
                                    " <binding name='").append(arrVar.get(t + 1)).append("'>\n").append(
                                    " <uri>").append("http://statspace.linkedwidgets.org/codelist/cl_unitMeasure/NO").append("</uri>\n").append(
                                    " </binding>\n");
                        }
                    }
                    sResult.append(
                            " </result>\n");
                }
            }
        }
    }
    sResult.append(
            " </results>\n").append(
            "</sparql>");
    return sResult.toString();
}
/**
 * Serializes the integrated query result as an HTML table; each dataset's
 * rows carry a CSS class ds0..ds15 for per-dataset styling.
 *
 * Fixes:
 * - each data row opened with {@code <tr ...>} was closed with
 *   {@code </td>}, producing malformed HTML; it is now closed with
 *   {@code </tr>};
 * - variable presence was tested with {@code getVariable() != ""}, a
 *   reference comparison; content is now tested with {@code isEmpty()}
 *   (also protecting the subsequent {@code substring(1)} call).
 *
 * @param inputMD          metadata describing the input query's components
 * @param arrMetaData      metadata (with values) of every matched dataset
 * @param arrTemporalValue all temporal values, used to order the rows
 * @return the HTML table as a string
 */
public static String getResultHTML(MetaData inputMD, ArrayList<MetaData> arrMetaData, ArrayList<String> arrTemporalValue) {
    int i, j, k, index, n, t, m;
    ArrayList<String> arrVar = new ArrayList<String>();
    ArrayList<Integer> arrIndex;
    StringBuffer sResult = new StringBuffer();
    String sTime;
    sResult.append("<table>\n").append(
            " <thead>\n").append(
            " <tr>\n");
    // variable for dataset: use the query's own variable name when given
    // (strip the leading '?'), otherwise fall back to "dataset"
    if (!inputMD.getDataSet().getVariable().isEmpty()) {
        arrVar.add(inputMD.getDataSet().getVariable().substring(1));
        sResult.append(
                " <th>").append(arrVar.get(0)).append("</th>\n");
    } else {
        arrVar.add("dataset");
        sResult.append(
                " <th>").append(arrVar.get(0)).append("</th>\n");
    }
    // variables for components
    for (i = 0; i < inputMD.getNumberofComponent(); i++) {
        if (!inputMD.getComponent(i).getVariable().isEmpty()) {
            arrVar.add(inputMD.getComponent(i).getVariable().substring(1));
            sResult.append(
                    " <th>").append(arrVar.get(i + 1)).append("</th>\n");
        }
    }
    sResult.append(
            " </tr>\n").append(
            " </thead>\n").append(
            " <tbody>\n");
    // add values, grouped by temporal value, then by dataset
    index = inputMD.getIndexOfTemporalDimension();
    n = arrTemporalValue.size();
    for (i = 0; i < n; i++) {
        sTime = arrTemporalValue.get(i);
        for (j = 0; j < arrMetaData.size(); j++) {
            arrIndex = arrMetaData.get(j).getComponent(index).indexOf(sTime);
            if (arrIndex.size() > 0) {
                for (k = 0; k < arrIndex.size(); k++) {
                    m = arrIndex.get(k);
                    sResult.append(
                            " <tr class='ds").append(j % 16).append("'>\n");
                    // value of dataset variable
                    sResult.append(
                            " <td>").append(arrMetaData.get(j).getDataSet().getUri()).append("</td>\n");
                    // value of other variables
                    for (t = 0; t < arrMetaData.get(j).getNumberofComponent(); t++) {
                        if (arrMetaData.get(j).getComponent(t).getValueSize() >= m && !arrMetaData.get(j).getComponent(t).getType().contains("Attribute")) {
                            if (arrMetaData.get(j).getComponent(t).getType().contains("Measure"))
                                sResult.append(
                                        " <td>").append(arrMetaData.get(j).getComponent(t).getValue(m)).append("</td>\n");
                            else
                                sResult.append(
                                        " <td>").append(arrMetaData.get(j).getComponent(t).getValueReference(m)).append("</td>\n");
                        } else if (arrMetaData.get(j).getComponent(t).getValueSize() == 1) {
                            sResult.append(
                                    " <td>").append(arrMetaData.get(j).getComponent(t).getValueReference(0)).append("</td>\n");
                        } else if (arrMetaData.get(j).getComponent(t).getType().contains("Attribute")) {
                            // unit attribute without a value: emit the "no unit" code
                            sResult.append(
                                    " <td>").append("http://statspace.linkedwidgets.org/codelist/cl_unitMeasure/NO").append("</td>\n");
                        }
                    }
                    // close the row (previously emitted "</td>", producing
                    // malformed HTML)
                    sResult.append(
                            " </tr>\n");
                }
            }
        }
    }
    sResult.append(
            " </tbody>\n").append(
            "</table>");
    return sResult.toString();
}
/**
 * Serializes the provenance (co-reference) information of the merged datasets
 * in a SPARQL-results-like JSON format.
 *
 * For every non-measure component, one binding is emitted for the component's
 * own URI and one per distinct value URI, mapping the co-reference URI used
 * in the repository to the URI used in the originating dataset.
 *
 * @param arrMetaData metadata of the merged datasets; may be empty
 * @return the provenance bindings as a JSON string
 */
public static String getProvenanceJSON(ArrayList<MetaData> arrMetaData){
    int i, j, k, n;
    ArrayList<String> arrDistinctUri;
    StringBuffer sProvenance = new StringBuffer();
    String sUri, sRefUri, sDSUri;
    sProvenance.append("{\n").append(
            " \"head\":{\n").append(
            " \"var\":[\n").append(
            " \"Co-reference URI used in the repository\",\n").append(
            " \"URI used in the dataset\",\n").append(
            " \"Dataset\"\n").append(
            " ]\n").append(
            " },\n").append(
            " \"results\":{\n").append(
            " \"bindings\":[\n");
    // NOTE(review): assumes every dataset has the same component count as the
    // first — confirm with the merge logic. Guarded against an empty list
    // (the original threw on get(0)).
    n = arrMetaData.isEmpty() ? 0 : arrMetaData.get(0).getNumberofComponent();
    for(i=0; i<arrMetaData.size(); i++){
        sDSUri = arrMetaData.get(i).getDataSet().getUri();
        for(j=0; j<n; j++){
            if(arrMetaData.get(i).getComponent(j).getType().contains("Measure")) continue; //ignore measure component
            // Exact-match de-duplication per component. The original used
            // StringBuffer.indexOf, which also matched substrings of
            // previously seen URIs and silently dropped bindings.
            arrDistinctUri = new ArrayList<String>();
            sUri = arrMetaData.get(i).getComponent(j).getUri();
            sRefUri = arrMetaData.get(i).getComponent(j).getUriReference();
            sProvenance.append(" {\n")
                    .append(" \"Co-reference URI used in the repository\":")
                    .append("\"").append(sRefUri).append("\",\n")
                    .append(" \"URI used in the dataset\":")
                    .append("\"").append(sUri).append("\",\n")
                    .append(" \"Dataset\":")
                    .append("\"").append(sDSUri).append("\"\n")
                    .append(" },\n");
            for(k=0; k<arrMetaData.get(i).getComponent(j).getValueSize(); k++){
                sUri = arrMetaData.get(i).getComponent(j).getValue(k);
                sRefUri = arrMetaData.get(i).getComponent(j).getValueReference(k);
                if(!arrDistinctUri.contains(sUri)){
                    arrDistinctUri.add(sUri);
                    sProvenance.append(" {\n")
                            .append(" \"Co-reference URI used in the repository\":")
                            .append("\"").append(sRefUri).append("\",\n")
                            .append(" \"URI used in the dataset\":")
                            // BUGFIX: the original emitted sDSUri here, so the
                            // dataset URI appeared in both this field and "Dataset"
                            // (the XML sibling method correctly emits the value URI)
                            .append("\"").append(sUri).append("\",\n")
                            .append(" \"Dataset\":")
                            .append("\"").append(sDSUri).append("\"\n")
                            .append(" },\n");
                }
            }
        }
    }
    // Strip the trailing ",\n" of the last binding; guarded so an empty
    // result set does not truncate the opening "[" instead.
    if(sProvenance.charAt(sProvenance.length()-2) == ','){
        sProvenance.delete(sProvenance.length()-2, sProvenance.length());
    }
    sProvenance.append("\n ]\n").append(" }\n").append("}");
    return sProvenance.toString();
}
/**
 * Serializes the provenance (co-reference) information of the merged datasets
 * in the W3C SPARQL-results XML format.
 *
 * For every non-measure component, one result is emitted for the component's
 * own URI and one per distinct value URI.
 *
 * @param arrMetaData metadata of the merged datasets; may be empty
 * @return the provenance results as an XML string
 */
public static String getProvenanceXML(ArrayList<MetaData> arrMetaData){
    int i, j, k, n;
    ArrayList<String> arrDistinctUri;
    StringBuffer sProvenance = new StringBuffer();
    String sUri, sRefUri, sDSUri;
    sProvenance.append("<?xml version='1.0' encoding='UTF-8'?>\n"+
            "<sparql xmlns='http://www.w3.org/2005/sparql-results#'>\n"+
            " <head>\n");
    sProvenance.append(" <variable name=").append("\"Co-reference URI used in the repository\"/>\n")
            .append(" <variable name=").append("\"URI used in the dataset\"/>\n")
            .append(" <variable name=").append("\"Dataset\"/>\n")
            .append(" </head>\n");
    sProvenance.append(" <results>\n");
    // NOTE(review): assumes every dataset has the same component count as the
    // first — confirm with the merge logic. Guarded against an empty list
    // (the original threw on get(0)).
    n = arrMetaData.isEmpty() ? 0 : arrMetaData.get(0).getNumberofComponent();
    for(i=0; i<arrMetaData.size(); i++){
        sDSUri = arrMetaData.get(i).getDataSet().getUri();
        for(j=0; j<n; j++){
            if(arrMetaData.get(i).getComponent(j).getType().contains("Measure")) continue; //ignore measure component
            // Exact-match de-duplication per component. The original used
            // StringBuffer.indexOf, which also matched substrings of
            // previously seen URIs and silently dropped results.
            arrDistinctUri = new ArrayList<String>();
            sUri = arrMetaData.get(i).getComponent(j).getUri();
            sRefUri = arrMetaData.get(i).getComponent(j).getUriReference();
            sProvenance.append(" <result>\n")
                    .append(" <binding name=\"Co-reference URI used in the repository\">\n")
                    .append(" <uri>")
                    .append(sRefUri)
                    .append("</uri>\n")
                    .append(" </binding>\n")
                    .append(" <binding name=\"URI used in the dataset\">\n")
                    .append(" <uri>")
                    .append(sUri)
                    .append("</uri>\n")
                    .append(" </binding>\n")
                    .append(" <binding name=\"Dataset\">\n")
                    .append(" <uri>")
                    .append(sDSUri)
                    .append("</uri>\n")
                    .append(" </binding>\n")
                    .append(" </result>\n");
            for(k=0; k<arrMetaData.get(i).getComponent(j).getValueSize(); k++){
                sUri = arrMetaData.get(i).getComponent(j).getValue(k);
                sRefUri = arrMetaData.get(i).getComponent(j).getValueReference(k);
                if(!arrDistinctUri.contains(sUri)){
                    arrDistinctUri.add(sUri);
                    sProvenance.append(" <result>\n")
                            .append(" <binding name=\"Co-reference URI used in the repository\">\n")
                            .append(" <uri>")
                            .append(sRefUri)
                            .append("</uri>\n")
                            .append(" </binding>\n")
                            .append(" <binding name=\"URI used in the dataset\">\n")
                            .append(" <uri>")
                            .append(sUri)
                            .append("</uri>\n")
                            .append(" </binding>\n")
                            .append(" <binding name=\"Dataset\">\n")
                            .append(" <uri>")
                            .append(sDSUri)
                            .append("</uri>\n")
                            .append(" </binding>\n")
                            .append(" </result>\n");
                }
            }
        }
    }
    sProvenance.append(" </results>\n").append("</sparql>");
    return sProvenance.toString();
}
/**
 * Renders the provenance (co-reference) information of the merged datasets
 * as an HTML table: one row per component URI and per distinct value URI,
 * styled with a per-dataset CSS class {@code dsN}.
 *
 * @param arrMetaData metadata of the merged datasets; may be empty
 * @return the provenance table as an HTML string
 */
public static String getProvenanceHTML(ArrayList<MetaData> arrMetaData){
    int i, j, k, n;
    ArrayList<String> arrDistinctUri;
    StringBuffer sProvenance = new StringBuffer();
    String sUri, sRefUri, sDSUri;
    // BUGFIX: the original header emitted mismatched "<th>...</td>" pairs with
    // dangling "</th>" tags, never closed <thead>, and closed a <tbody> that
    // was never opened.
    sProvenance.append("<table>\n").append(
            " <thead>\n").append(
            " <tr>\n").append(
            " <th>Co-reference URI used in the repository</th>\n").append(
            " <th>URI used in the dataset</th>\n").append(
            " <th>Dataset</th>\n").append(
            " </tr>\n").append(
            " </thead>\n").append(
            " <tbody>\n");
    // NOTE(review): assumes every dataset has the same component count as the
    // first — confirm with the merge logic. Guarded against an empty list
    // (the original threw on get(0)).
    n = arrMetaData.isEmpty() ? 0 : arrMetaData.get(0).getNumberofComponent();
    for(i=0; i<arrMetaData.size(); i++){
        sDSUri = arrMetaData.get(i).getDataSet().getUri();
        for(j=0; j<n; j++){
            if(arrMetaData.get(i).getComponent(j).getType().contains("Measure")) continue; //ignore measure component
            // Exact-match de-duplication per component. The original used
            // StringBuffer.indexOf, which also matched substrings of
            // previously seen URIs and silently dropped rows.
            arrDistinctUri = new ArrayList<String>();
            sUri = arrMetaData.get(i).getComponent(j).getUri();
            sRefUri = arrMetaData.get(i).getComponent(j).getUriReference();
            sProvenance.append(" <tr class='ds").append(i%16).append("'>\n")
                    .append(" <td>")
                    .append(sRefUri)
                    .append(" </td>\n")
                    .append(" <td>")
                    .append(sUri)
                    .append(" </td>\n")
                    .append(" <td>")
                    .append(sDSUri)
                    .append(" </td>\n")
                    .append(" </tr>\n"); // BUGFIX: rows were never closed
            for(k=0; k<arrMetaData.get(i).getComponent(j).getValueSize(); k++){
                sUri = arrMetaData.get(i).getComponent(j).getValue(k);
                sRefUri = arrMetaData.get(i).getComponent(j).getValueReference(k);
                if(!arrDistinctUri.contains(sUri)){
                    arrDistinctUri.add(sUri);
                    sProvenance.append(" <tr class='ds").append(i%16).append("'>\n")
                            .append(" <td>")
                            .append(sRefUri)
                            .append(" </td>\n")
                            .append(" <td>")
                            .append(sUri)
                            .append(" </td>\n")
                            .append(" <td>")
                            .append(sDSUri)
                            .append(" </td>\n")
                            .append(" </tr>\n"); // BUGFIX: rows were never closed
                }
            }
        }
    }
    sProvenance.append(
            " </tbody>\n").append(
            "</table>");
    return sProvenance.toString();
}
}
| |
package com.buddycloud.customviews;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Path;
import android.graphics.Point;
import android.graphics.Typeface;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.RectShape;
import android.graphics.drawable.shapes.Shape;
import android.os.Build;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.ViewGroup;
import android.view.ViewTreeObserver;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.buddycloud.R;
import com.buddycloud.utils.TypefacesUtil;
/**
* Create custom error tooltip view. It offers different
* set of attributes that can help to customize the tooltip
* view.
*
* <code>
* <com.buddycloud.customviews.TooltipErrorView
* android:id="@+id/passwordErrorTooltip"
* android:layout_width="fill_parent"
* android:layout_height="wrap_content"
* tooltipErrorView:bgColor="#f6f6f6"
* tooltipErrorView:borderColor="#d5d5d5"
* tooltipErrorView:fontTypeface="Roboto-Light.ttf"
* tooltipErrorView:textColor="#768595" />
* </code>
*
* @author Adnan Urooj (Deminem)
*
*/
public class TooltipErrorView extends LinearLayout implements
        ViewTreeObserver.OnPreDrawListener {

    private static final String FONTS_PATH = "fonts/";
    private static final int POINTER_HEIGHT = 6;       // dp, height of the pointer triangle
    private static final int POINTER_WIDE_HEIGHT = 12; // dp, width of the pointer base
    private static final int POINTER_START = 35;       // dp, x-offset of the pointer tip

    private ViewGroup mContentHolder;
    private TextView mToolTipTV;
    private CharSequence mText;
    private int mColor;        // bubble background color
    private int mBorderColor;  // bubble border color
    private int mTextColor;
    private int mTextSize;
    private Typeface mTypeface;
    private int mPointHeightPx; // POINTER_HEIGHT converted to px

    public TooltipErrorView(final Context context) {
        super(context);
        init(context, null, 0);
    }

    public TooltipErrorView(final Context context, final AttributeSet attrs) {
        super(context, attrs);
        init(context, attrs, 0);
    }

    /**
     * Inflates the tooltip layout, reads the styled attributes
     * (text, typeface, sizes, colors) and configures the text view.
     */
    private void init(final Context context, final AttributeSet attrs,
            final int defStyle) {
        setLayoutParams(new LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT,
                ViewGroup.LayoutParams.WRAP_CONTENT));
        setOrientation(VERTICAL);
        LayoutInflater.from(getContext()).inflate(R.layout.tooltip, this, true);
        mContentHolder = (ViewGroup) findViewById(R.id.tooltip_contentholder);
        mToolTipTV = (TextView) findViewById(R.id.tooltip_contenttv);
        Resources r = getContext().getResources();
        mPointHeightPx = (int) TypedValue.applyDimension(
                TypedValue.COMPLEX_UNIT_DIP, POINTER_HEIGHT,
                r.getDisplayMetrics());
        TypedArray a = context.getTheme().obtainStyledAttributes(attrs,
                R.styleable.TooltipErrorView, 0, 0);
        try {
            mText = a.getString(R.styleable.TooltipErrorView_text);
            mTypeface = getTypeface(context,
                    a.getString(R.styleable.TooltipErrorView_fontTypeface));
            mTextSize = a.getInt(R.styleable.TooltipErrorView_textSize, 18);
            mTextColor = a.getColor(R.styleable.TooltipErrorView_textColor,
                    Color.parseColor("#72828C"));
            mColor = a.getColor(R.styleable.TooltipErrorView_bgColor,
                    Color.parseColor("#f6f6f6"));
            mBorderColor = a.getColor(R.styleable.TooltipErrorView_borderColor,
                    Color.parseColor("#d4d4d4"));
        } finally {
            a.recycle(); // TypedArray must always be recycled
        }
        // setup the config for tooltip
        setupToolTip();
    }

    @SuppressWarnings("deprecation")
    @SuppressLint("NewApi")
    @Override
    public boolean onPreDraw() {
        // Re-measure the content holder and rebuild the bubble background so
        // the pointer triangle tracks the current text height.
        // NOTE(review): this listener is never removed, so the drawable is
        // rebuilt on every pre-draw pass — confirm whether that is intended.
        mContentHolder.measure(getMeasuredWidth(), getTextHeight() + mPointHeightPx);
        Shape shape = getTooltipShape();
        ShapeDrawable d = new ShapeDrawable(shape);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            mContentHolder.setBackground(d);
        } else {
            mContentHolder.setBackgroundDrawable(d);
        }
        return true;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int w = getMeasuredWidth();
        int h = getMeasuredHeight();
        // BUGFIX: horizontal padding is the SUM of both sides; the original
        // subtracted the difference (left - right), which is 0 for symmetric
        // padding and wrong otherwise.
        w -= getPaddingLeft() + getPaddingRight();
        h = getTextHeight() + getPaddingTop() + getPaddingBottom()
                + mPointHeightPx;
        setMeasuredDimension(w, h);
    }

    @Override
    public void setVisibility(int visibility) {
        super.setVisibility(visibility);
        // propagate visibility to the inflated children
        mContentHolder.setVisibility(visibility);
        mToolTipTV.setVisibility(visibility);
        invalidate();
    }

    /**
     * Applies the configured text, typeface, color and size to the text view
     * and registers this view as a pre-draw listener.
     */
    private void setupToolTip() {
        if (!TextUtils.isEmpty(mText)) {
            mToolTipTV.setText(mText);
        }
        mToolTipTV.setTypeface(mTypeface);
        mToolTipTV.setTextColor(mTextColor);
        mToolTipTV.setTextSize(mTextSize);
        getViewTreeObserver().addOnPreDrawListener(this);
    }

    /**
     * Resolves the typeface for the given asset font name, falling back to
     * {@link Typeface#SANS_SERIF} when no font name is given.
     *
     * @param context  context used to access the assets
     * @param fontName file name under assets/fonts/, may be null or empty
     * @return the resolved typeface, never null
     */
    private Typeface getTypeface(Context context, String fontName) {
        if (!TextUtils.isEmpty(fontName)) {
            return TypefacesUtil.get(context, FONTS_PATH + fontName);
        }
        return Typeface.SANS_SERIF;
    }

    /** Measures the current text at the holder's width and returns its height in px. */
    private int getTextHeight() {
        mToolTipTV.setText(mText);
        mToolTipTV.measure(MeasureSpec.makeMeasureSpec(
                mContentHolder.getWidth(), MeasureSpec.EXACTLY), MeasureSpec
                .makeMeasureSpec(LayoutParams.WRAP_CONTENT,
                        MeasureSpec.UNSPECIFIED));
        return mToolTipTV.getMeasuredHeight();
    }

    /**
     * Set the tooltip text.
     *
     * @param msg the new text; ignored when empty
     */
    public void setText(CharSequence msg) {
        // BUGFIX: compare content, not references — the original used
        // (mText != msg), which is true for equal-content strings and caused
        // needless relayouts.
        if (!TextUtils.isEmpty(msg) && !TextUtils.equals(mText, msg)) {
            mText = msg;
            mToolTipTV.setText(mText);
            requestLayout();
            invalidate();
        }
    }

    /**
     * Set the tooltip text color.
     *
     * @param color the new text color
     */
    public void setTextColor(int color) {
        if (mTextColor != color) {
            mTextColor = color;
            mToolTipTV.setTextColor(color);
            invalidate();
        }
    }

    /**
     * Set the tooltip border color.
     *
     * @param color the new border color
     */
    public void setBorderColor(int color) {
        if (mBorderColor != color) {
            mBorderColor = color;
            invalidate();
        }
    }

    /**
     * Set the tooltip background color. Overrides
     * {@code View.setBackgroundColor}: the color is painted by
     * {@link #drawOnCanvas} rather than by the view background.
     *
     * @param color the new background color
     */
    public void setBackgroundColor(int color) {
        if (mColor != color) {
            mColor = color;
            invalidate();
        }
    }

    /**
     * Set the tooltip typeface.
     *
     * @param typeface the new typeface
     */
    public void setTypeface(Typeface typeface) {
        if (mTypeface != typeface) {
            mTypeface = typeface;
            // BUGFIX: apply the typeface to the TextView — the original only
            // stored it, so invalidate() redrew with the old typeface.
            mToolTipTV.setTypeface(typeface);
            invalidate();
        }
    }

    /** @return the current tooltip text */
    public CharSequence getText() {
        return mText;
    }

    /** @return the tooltip background color */
    public int getColor() {
        return mColor;
    }

    /** @return the tooltip text color */
    public int getTextColor() {
        return mTextColor;
    }

    /** @return the tooltip typeface */
    public Typeface getTypeface() {
        return mTypeface;
    }

    /**
     * Builds the bubble shape: a rectangle with a triangular pointer on top.
     *
     * @return the shape used as the content holder's background
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    private Shape getTooltipShape() {
        Resources r = this.getContext().getResources();
        final int pointHeightPx = (int) TypedValue.applyDimension(
                TypedValue.COMPLEX_UNIT_DIP, POINTER_HEIGHT,
                r.getDisplayMetrics());
        final int pointedHeightPx = (int) TypedValue.applyDimension(
                TypedValue.COMPLEX_UNIT_DIP, POINTER_WIDE_HEIGHT,
                r.getDisplayMetrics());
        final int pointStartPx = (int) TypedValue.applyDimension(
                TypedValue.COMPLEX_UNIT_DIP, POINTER_START,
                r.getDisplayMetrics());
        RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mContentHolder
                .getLayoutParams();
        mToolTipTV.setY(pointHeightPx); // push the text below the pointer
        params.height = mToolTipTV.getHeight() + pointHeightPx;
        final Path rectPath = new Path();
        final Path rectBorderPath = new Path();
        // Create the rectangular shape
        Shape shape = new RectShape() {
            @Override
            protected void onResize(float w, float h) {
                createShapePath(rectPath, rectBorderPath, w, h, pointHeightPx,
                        pointedHeightPx, pointStartPx);
            }

            @Override
            public void draw(Canvas canvas, Paint paint) {
                drawOnCanvas(canvas, paint, rectPath, rectBorderPath);
            }
        };
        return shape;
    }

    /**
     * Fills both paths with the bubble outline: rectangle body plus a
     * triangular pointer whose tip sits at x = pointStartPx.
     *
     * @param rectPath        path painted with the background color
     * @param rectBorderPath  path painted with the border color
     * @param width           shape width in px
     * @param height          shape height in px
     * @param pointHeightPx   pointer height in px
     * @param pointedHeightPx pointer base width in px
     * @param pointStartPx    x-offset of the pointer tip in px
     */
    private void createShapePath(Path rectPath, Path rectBorderPath,
            float width, float height, int pointHeightPx, int pointedHeightPx,
            int pointStartPx) {
        int w = (int) width;
        int h = (int) height;
        Point a = new Point(0, h);
        Point b = new Point(w, h);
        Point c = new Point(w, pointHeightPx);
        Point d = new Point((w - (w - pointStartPx)) + (pointedHeightPx / 2),
                pointHeightPx);
        Point e = new Point((w - (w - pointStartPx)), 0); // this is the sharp
                                                          // point of the
                                                          // triangle
        Point f = new Point((w - (w - pointStartPx)) - (pointedHeightPx / 2),
                pointHeightPx);
        Point g = new Point(0, pointHeightPx);
        rectPath.reset();
        rectPath.moveTo(a.x, a.y);
        rectPath.lineTo(b.x, b.y);
        rectPath.lineTo(c.x, c.y);
        rectPath.lineTo(d.x, d.y);
        rectPath.lineTo(e.x, e.y);
        rectPath.lineTo(f.x, f.y);
        rectPath.lineTo(g.x, g.y);
        rectPath.close();
        rectBorderPath.reset();
        rectBorderPath.moveTo(a.x, a.y);
        rectBorderPath.lineTo(b.x, b.y);
        rectBorderPath.lineTo(c.x, c.y);
        rectBorderPath.lineTo(d.x, d.y);
        rectBorderPath.lineTo(e.x, e.y);
        rectBorderPath.lineTo(f.x, f.y);
        rectBorderPath.lineTo(g.x, g.y);
        rectBorderPath.close();
    }

    /**
     * Paints the filled bubble and then strokes its border.
     *
     * @param c              the target canvas
     * @param p              the paint, mutated for fill and stroke passes
     * @param rectPath       bubble fill path
     * @param rectBorderPath bubble border path
     */
    private void drawOnCanvas(Canvas c, Paint p, Path rectPath,
            Path rectBorderPath) {
        // set background color
        if (rectPath != null) {
            p.setColor(mColor);
            c.drawPath(rectPath, p);
        }
        // set border
        if (rectBorderPath != null) {
            p.setColor(mBorderColor);
            p.setStyle(Style.STROKE);
            p.setStrokeWidth(3);
            c.drawPath(rectBorderPath, p);
        }
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.dmn.backend.marshalling.v1_2;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import javax.xml.namespace.QName;
import javax.xml.transform.Source;
import javax.xml.transform.stream.StreamSource;
import org.junit.Test;
import org.kie.dmn.api.marshalling.DMNMarshaller;
import org.kie.dmn.backend.marshalling.v1_2.extensions.MyTestRegister;
import org.kie.dmn.backend.marshalling.v1x.DMNMarshallerFactory;
import org.kie.dmn.model.api.Definitions;
import org.kie.dmn.model.api.dmndi.DMNShape;
import org.kie.dmn.model.api.dmndi.DMNStyle;
import org.kie.dmn.model.v1_2.KieDMNModelInstrumentedBase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Node;
import org.xmlunit.builder.DiffBuilder;
import org.xmlunit.builder.Input;
import org.xmlunit.diff.ComparisonResult;
import org.xmlunit.diff.ComparisonType;
import org.xmlunit.diff.Diff;
import org.xmlunit.diff.DifferenceEvaluators;
import org.xmlunit.validation.Languages;
import org.xmlunit.validation.ValidationProblem;
import org.xmlunit.validation.ValidationResult;
import org.xmlunit.validation.Validator;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class UnmarshalMarshalTest {

    // NOTE(review): getResource().getFile() breaks for paths containing
    // URL-encoded characters (e.g. spaces) — confirm build environments.
    private static final StreamSource DMN12_SCHEMA_SOURCE = new StreamSource(UnmarshalMarshalTest.class.getResource("/DMN12.xsd").getFile());
    private static final DMNMarshaller MARSHALLER = new org.kie.dmn.backend.marshalling.v1x.XStreamMarshaller();
    protected static final Logger logger = LoggerFactory.getLogger(UnmarshalMarshalTest.class);

    @Test
    public void testV12_ch11example() throws Exception {
        testRoundTripV12("org/kie/dmn/backend/marshalling/v1_2/", "ch11example.dmn");
    }

    @Test
    public void testV12_ImportName() throws Exception {
        testRoundTripV12("org/kie/dmn/backend/marshalling/v1_2/", "ImportName.dmn");
    }

    @Test
    public void testV12_DecisionService20180911v12() throws Exception {
        // DROOLS-2987 DMN v1.2 marshaller failing marshalling DecisionService node and dmndi:DMNDecisionServiceDividerLine
        testRoundTripV12("org/kie/dmn/backend/marshalling/v1_2/", "DecisionService20180911v12.dmn");
    }

    @Test
    public void testV12_DiamondWithColors() throws Exception {
        testRoundTripV12("org/kie/dmn/backend/marshalling/v1_2/", "diamondWithColors.dmn");
    }

    @Test
    public void testV12_DMNDIDiagramElementExtension() throws Exception {
        testRoundTripV12("org/kie/dmn/backend/marshalling/v1_2/", "DMNDIDiagramElementExtension.dmn");
    }

    @Test
    public void testV12_DMNDIDiagramElementExtension_withContent() throws Exception {
        DMNMarshaller marshaller = DMNMarshallerFactory.newMarshallerWithExtensions(Arrays.asList(new MyTestRegister()));
        testRoundTrip("org/kie/dmn/backend/marshalling/v1_2/", "DMNDIDiagramElementExtension_withContent.dmn", marshaller, DMN12_SCHEMA_SOURCE);
    }

    @Test
    public void test_hardcoded_java_max_call() throws Exception {
        testRoundTripV12("org/kie/dmn/backend/marshalling/v1_2/", "hardcoded-java-max-call.dmn");
    }

    @Test
    public void test_FontSize_sharedStyle() throws Exception {
        testRoundTripV12("org/kie/dmn/backend/marshalling/v1_2/", "test-FontSize-sharedStyle.dmn");
        Definitions definitions = MARSHALLER.unmarshal(new InputStreamReader(this.getClass().getResourceAsStream("test-FontSize-sharedStyle.dmn")));
        DMNShape shape0 = (DMNShape) definitions.getDMNDI().getDMNDiagram().get(0).getDMNDiagramElement().get(0);
        DMNStyle shape0sharedStyle = (DMNStyle) shape0.getDMNLabel().getSharedStyle();
        assertEquals("LS_4d396200-362f-4939-830d-32d2b4c87042_0", shape0sharedStyle.getId());
        assertEquals(21d, shape0sharedStyle.getFontSize(), 0.0d);
    }

    @Test
    public void test_DMNLabel_Text() throws Exception {
        testRoundTripV12("org/kie/dmn/backend/marshalling/v1_2/", "DMNLabel-Text.dmn");
    }

    /** Round-trips a DMN v1.2 file with the default marshaller and schema. */
    public void testRoundTripV12(String subdir, String xmlfile) throws Exception {
        testRoundTrip(subdir, xmlfile, MARSHALLER, DMN12_SCHEMA_SOURCE);
    }

    /**
     * Validates the input file against the schema, unmarshals and re-marshals
     * it, validates the output, and asserts the two XML documents are at
     * least "similar" under a customized XMLUnit comparison that tolerates
     * explicitly-serialized default attribute values.
     */
    public void testRoundTrip(String subdir, String xmlfile, DMNMarshaller marshaller, Source schemaSource) throws Exception {
        File baseOutputDir = new File("target/test-xmlunit/");
        File testClassesBaseDir = new File("target/test-classes/");
        File inputXMLFile = new File(testClassesBaseDir, subdir + xmlfile);
        // BUGFIX: the original leaked the FileInputStream (never closed);
        // try-with-resources closes it even when unmarshalling throws.
        Definitions unmarshal;
        try (FileInputStream fis = new FileInputStream(inputXMLFile);
                InputStreamReader reader = new InputStreamReader(fis)) {
            unmarshal = marshaller.unmarshal(reader);
        }
        Validator v = Validator.forLanguage(Languages.W3C_XML_SCHEMA_NS_URI);
        v.setSchemaSource(schemaSource);
        ValidationResult validateInputResult = v.validateInstance(new StreamSource( inputXMLFile ));
        if (!validateInputResult.isValid()) {
            for ( ValidationProblem p : validateInputResult.getProblems()) {
                System.err.println(p);
            }
        }
        assertTrue(validateInputResult.isValid());
        final File subdirFile = new File(baseOutputDir, subdir);
        // mkdirs() also returns false when the directory already exists;
        // only warn when it is genuinely missing afterwards.
        if (!subdirFile.mkdirs() && !subdirFile.isDirectory()) {
            logger.warn("mkdirs() failed for File: " + subdirFile.getAbsolutePath() + "!");
        }
        // Copy the input next to the output as "a.<name>" for manual diffing.
        try (FileOutputStream sourceFos = new FileOutputStream(new File(baseOutputDir, subdir + "a." + xmlfile))) {
            Files.copy(inputXMLFile.toPath(), sourceFos);
            sourceFos.flush();
        }
        System.out.println( marshaller.marshal(unmarshal) );
        File outputXMLFile = new File(baseOutputDir, subdir + "b." + xmlfile);
        try (FileWriter targetFos = new FileWriter( outputXMLFile )) {
            marshaller.marshal(unmarshal, targetFos);
        }
        // Should also validate output XML:
        ValidationResult validateOutputResult = v.validateInstance(new StreamSource( outputXMLFile ));
        if (!validateOutputResult.isValid()) {
            for ( ValidationProblem p : validateOutputResult.getProblems()) {
                System.err.println(p);
            }
        }
        assertTrue(validateOutputResult.isValid());
        System.out.println("\n---\nDefault XMLUnit comparison:");
        Source control = Input.fromFile( inputXMLFile ).build();
        Source test = Input.fromFile( outputXMLFile ).build();
        Diff allDiffsSimilarAndDifferent = DiffBuilder
                .compare( control )
                .withTest( test )
                .build();
        allDiffsSimilarAndDifferent.getDifferences().forEach(System.out::println);
        System.out.println("XMLUnit comparison with customized similarity for defaults:");
        // in the following a manual DifferenceEvaluator is needed until XMLUnit is configured for properly parsing the XSD linked inside the XML,
        // in order to detect the optional+defaultvalue attributes of xml element which might be implicit in source-test, and explicit in test-serialized.
        /*
         * $ grep -Eo "<xsd:attribute name=\\\"([^\\\"]*)\\\" type=\\\"([^\\\"]*)\\\" use=\\\"optional\\\" default=\\\"([^\\\"])*\\\"" dmn.xsd
        <xsd:attribute name="expressionLanguage" type="xsd:anyURI" use="optional" default="http://www.omg.org/spec/FEEL/20140401"
        <xsd:attribute name="typeLanguage" type="xsd:anyURI" use="optional" default="http://www.omg.org/spec/FEEL/20140401"
        <xsd:attribute name="isCollection" type="xsd:boolean" use="optional" default="false"
        <xsd:attribute name="hitPolicy" type="tHitPolicy" use="optional" default="UNIQUE"
        <xsd:attribute name="preferredOrientation" type="tDecisionTableOrientation" use="optional" default="Rule-as-Row"
        DMNv1.2:
        <xsd:attribute name="kind" type="tFunctionKind" default="FEEL"/>
        <xsd:attribute name="textFormat" type="xsd:string" default="text/plain"/>
        <xsd:attribute name="associationDirection" type="tAssociationDirection" default="None"/>
        DMNDIv1.2:
        <xsd:attribute name="isCollapsed" type="xsd:boolean" use="optional" default="false"/>
         */
        Set<QName> attrWhichCanDefault = new HashSet<QName>();
        attrWhichCanDefault.addAll(Arrays.asList(new QName[]{
                new QName("expressionLanguage"),
                new QName("typeLanguage"),
                new QName("isCollection"),
                new QName("hitPolicy"),
                new QName("preferredOrientation"),
                new QName("kind"),
                new QName("textFormat"),
                new QName("associationDirection"),
                new QName("isCollapsed")
        }));
        Set<String> nodeHavingDefaultableAttr = new HashSet<>();
        nodeHavingDefaultableAttr.addAll(Arrays.asList(new String[]{"definitions", "decisionTable", "itemDefinition", "itemComponent", "encapsulatedLogic", "textAnnotation", "association", "DMNShape"}));
        Diff checkSimilar = DiffBuilder
                .compare( control )
                .withTest( test )
                .withDifferenceEvaluator(
                        DifferenceEvaluators.chain(DifferenceEvaluators.Default,
                                ((comparison, outcome) -> {
                                    if (outcome == ComparisonResult.DIFFERENT && comparison.getType() == ComparisonType.ELEMENT_NUM_ATTRIBUTES) {
                                        if (comparison.getControlDetails().getTarget().getNodeName().equals( comparison.getTestDetails().getTarget().getNodeName() )
                                                && nodeHavingDefaultableAttr.contains( safeStripDMNPRefix( comparison.getControlDetails().getTarget() ) )) {
                                            return ComparisonResult.SIMILAR;
                                        }
                                    }
                                    // DMNDI/DMNDiagram#documentation is actually deserialized escaped with newlines as by the XML JDK infra.
                                    if (outcome == ComparisonResult.DIFFERENT && comparison.getType() == ComparisonType.ATTR_VALUE) {
                                        if (comparison.getControlDetails().getTarget().getNodeName().equals( comparison.getTestDetails().getTarget().getNodeName() )
                                                && comparison.getControlDetails().getTarget().getNodeType() == Node.ATTRIBUTE_NODE
                                                && comparison.getControlDetails().getTarget().getLocalName().equals("documentation")) {
                                            return ComparisonResult.SIMILAR;
                                        }
                                    }
                                    if (outcome == ComparisonResult.DIFFERENT && comparison.getType() == ComparisonType.ATTR_NAME_LOOKUP) {
                                        boolean testIsDefaulableAttribute = false;
                                        QName whichDefaultableAttr = null;
                                        if (comparison.getControlDetails().getValue() == null && attrWhichCanDefault.contains(comparison.getTestDetails().getValue())) {
                                            for (QName a : attrWhichCanDefault) {
                                                boolean check = comparison.getTestDetails().getXPath().endsWith("@"+a);
                                                if (check) {
                                                    testIsDefaulableAttribute = true;
                                                    whichDefaultableAttr = a;
                                                    continue;
                                                }
                                            }
                                        }
                                        if ( testIsDefaulableAttribute ) {
                                            if (comparison.getTestDetails().getXPath().equals(comparison.getControlDetails().getXPath() + "/@" + whichDefaultableAttr )) {
                                                // TODO missing to check the explicited option attribute has value set to the actual default value.
                                                return ComparisonResult.SIMILAR;
                                            }
                                        }
                                    }
                                    return outcome;
                                })))
                .ignoreWhitespace()
                .checkForSimilar()
                .build();
        checkSimilar.getDifferences().forEach(System.err::println);
        if (!checkSimilar.getDifferences().iterator().hasNext()) {
            System.out.println("[ EMPTY - no diffs using customized similarity ]");
        }
        assertFalse("XML are NOT similar: " + checkSimilar.toString(), checkSimilar.hasDifferences());
    }

    /**
     * Returns the local name of the node when it belongs to the DMN or DMNDI
     * namespace, otherwise null.
     */
    private String safeStripDMNPRefix(Node target) {
        if (KieDMNModelInstrumentedBase.URI_DMN.equals(target.getNamespaceURI()) ||
                KieDMNModelInstrumentedBase.URI_DMNDI.equals(target.getNamespaceURI())) {
            return target.getLocalName();
        }
        return null;
    }
}
| |
package aQute.lib.justif;
import java.util.ArrayList;
import java.util.Formatter;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/**
* Formatter. This formatter allows you to build up an input string and then
* wraps the text. The following markup is available
* <ul>
* <li>$- - Line over the remaining width
* <li>\\t[0-9] - Go to tab position, and set indent to that position
 * <li>\\f - Newline
* </ul>
*/
public class Justif {

    /** Tab stop columns addressed by the {@code \t<digit>} markup. */
    final int[] tabs;
    /** Maximum line width used when wrapping. */
    final int width;

    StringBuilder sb = new StringBuilder();
    Formatter f = new Formatter(sb);

    /**
     * @param width wrap width; 0 selects the default of 73
     * @param tabs  tab stop columns; null/empty selects {30, 40, 50, 60, 70}
     */
    public Justif(int width, int... tabs) {
        this.tabs = tabs == null || tabs.length == 0 ? new int[] {
            30, 40, 50, 60, 70
        } : tabs;
        this.width = width == 0 ? 73 : width;
    }

    public Justif() {
        this(0);
    }

    /**
     * Routine to wrap a stringbuffer. Basically adds line endings but has the
     * following control characters:
     * <ul>
     * <li>Space at the beginning of a line is repeated when wrapped for indent.
     * </li>
     * <li>A tab will mark the current position and wrapping will return to that
     * position</li>
     * <li>A form feed in a tabbed column will break but stay in the column</li>
     * </ul>
     *
     * @param sb the buffer to wrap in place
     */
    public void wrap(StringBuilder sb) {
        List<Integer> indents = new ArrayList<>();
        int indent = 0;
        int linelength = 0;
        int lastSpace = 0;
        int r = 0;
        boolean begin = true;
        while (r < sb.length()) {
            switch (sb.charAt(r++)) {
                case '\r' :
                    indents.clear();
                    sb.setCharAt(r - 1, '\n');
                    // FALL THROUGH
                case '\n' :
                    // restore the indent that was in effect before the last tab
                    indent = indents.isEmpty() ? 0 : indents.remove(0);
                    linelength = 0;
                    begin = true;
                    lastSpace = 0;
                    break;
                case ' ' :
                    if (begin)
                        indent++;
                    else {
                        // collapse runs of spaces to a single space
                        while (r < sb.length() && sb.charAt(r) == ' ')
                            sb.delete(r, r + 1);
                    }
                    lastSpace = r - 1;
                    linelength++;
                    break;
                case '\t' :
                    sb.deleteCharAt(--r);
                    indents.add(indent);
                    if (r < sb.length()) {
                        char digit = sb.charAt(r);
                        if (Character.isDigit(digit)) {
                            sb.deleteCharAt(r);
                            int column = (digit - '0');
                            if (column < tabs.length)
                                indent = tabs[column];
                            else
                                indent = column * 8;
                            // pad with spaces up to the tab stop
                            int diff = indent - linelength;
                            if (diff > 0) {
                                for (int i = 0; i < diff; i++) {
                                    sb.insert(r, ' ');
                                }
                                r += diff;
                                linelength += diff;
                            }
                        } else
                            System.err.println("missing digit after \t");
                    }
                    break;
                case '\f' :
                    // hard break, but stay in the current (tabbed) column
                    sb.setCharAt(r - 1, '\n');
                    for (int i = 0; i < indent; i++) {
                        sb.insert(r, ' ');
                    }
                    r += indent;
                    while (r < sb.length() && sb.charAt(r) == ' ')
                        sb.delete(r, r + 1);
                    linelength = 0;
                    lastSpace = 0;
                    break;
                case '$' :
                    // "$-", "$_", "$—": rule over the remaining width;
                    // a lone '$' falls through and is turned into a space
                    if (sb.length() > r) {
                        char c = sb.charAt(r);
                        if (c == '-' || c == '_' || c == '\u2014') {
                            sb.delete(r - 1, r); // remove $
                            begin = false;
                            linelength++;
                            while (linelength < width - 1) {
                                sb.insert(r++, c);
                                linelength++;
                            }
                            break;
                        }
                    }
                case '\u00A0' : // non breaking space
                    sb.setCharAt(r - 1, ' '); // Turn it into a space
                    // fall through
                default :
                    linelength++;
                    begin = false;
                    if (linelength > width) {
                        if (lastSpace == 0) {
                            // no break point on this line: force one here
                            lastSpace = r - 1;
                            sb.insert(lastSpace, ' ');
                            r++;
                        }
                        sb.setCharAt(lastSpace, '\n');
                        linelength = r - lastSpace - 1;
                        for (int i = 0; i < indent; i++) {
                            sb.insert(lastSpace + 1, ' ');
                            linelength++;
                        }
                        r += indent;
                        lastSpace = 0;
                    }
            }
        }
    }

    /** Wraps the internal buffer in place and returns it as a string. */
    public String wrap() {
        wrap(sb);
        return sb.toString();
    }

    public Formatter formatter() {
        return f;
    }

    /**
     * NOTE(review): wraps the internal buffer in place, so every call re-wraps
     * the already wrapped text.
     */
    public String toString() {
        wrap(sb);
        return sb.toString();
    }

    /**
     * Appends {@code string} to the buffer with every line prefixed by
     * {@code indent} spaces.
     * BUGFIX: the original dropped the first character of {@code string}
     * (the i == 0 branch appended only the indent, never the character).
     *
     * @param indent number of spaces to prefix each line with
     * @param string the text to append
     */
    public void indent(int indent, String string) {
        for (int i = 0; i < string.length(); i++) {
            char c = string.charAt(i);
            if (i == 0) {
                for (int j = 0; j < indent; j++)
                    sb.append(' ');
            }
            sb.append(c);
            if (c == '\n')
                for (int j = 0; j < indent; j++)
                    sb.append(' ');
        }
    }

    // TODO not working yet
    public void entry(String key, String separator, Object value) {
        sb.append(key);
        sb.append("\t1");
        sb.append(separator);
        sb.append("\t2");
        if (value instanceof Iterable) {
            Iterator< ? > it = ((Iterable< ? >) value).iterator();
            boolean hadone = false;
            String del = "";
            while (it.hasNext()) {
                sb.append(del).append(it.next() + "");
                sb.append("\r");
                hadone = true;
                del = "\t2";
            }
            if (!hadone)
                sb.append("\r");
        } else {
            sb.append(value + "");
            sb.append("\r");
        }
    }

    /** Emits one {@link #entry} per map entry. */
    public void table(Map<String,Object> table, String separator) {
        for (Entry<String,Object> e : table.entrySet()) {
            entry(e.getKey(), separator, e.getValue());
        }
    }

    /**
     * Stringifies {@code o}; long strings get a form feed after each comma so
     * wrapped continuation lines stay in the current column.
     */
    public String toString(Object o) {
        String s = "" + o;
        if (s.length() > 50)
            return s.replaceAll(",", ", \\\f");
        return s;
    }
}
| |
package org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.set;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import junit.framework.Assert;
import org.buddycloud.channelserver.Configuration;
import org.buddycloud.channelserver.channel.ChannelManager;
import org.buddycloud.channelserver.db.exception.NodeStoreException;
import org.buddycloud.channelserver.packetHandler.iq.IQTestHandler;
import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.JabberPubsub;
import org.buddycloud.channelserver.packetprocessor.iq.namespace.pubsub.PubSubElementProcessorAbstract;
import org.buddycloud.channelserver.pubsub.affiliation.Affiliations;
import org.buddycloud.channelserver.pubsub.model.NodeMembership;
import org.buddycloud.channelserver.pubsub.model.NodeSubscription;
import org.buddycloud.channelserver.pubsub.model.impl.NodeMembershipImpl;
import org.buddycloud.channelserver.pubsub.subscription.NodeSubscriptionMock;
import org.buddycloud.channelserver.pubsub.subscription.Subscriptions;
import org.buddycloud.channelserver.utils.node.item.payload.Buddycloud;
import org.dom4j.Element;
import org.dom4j.tree.BaseElement;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.xmpp.packet.IQ;
import org.xmpp.packet.JID;
import org.xmpp.packet.Message;
import org.xmpp.packet.Packet;
import org.xmpp.packet.PacketError;
import org.xmpp.resultsetmanagement.ResultSetImpl;
/**
 * Tests for {@code SubscriptionEvent}: processing of pubsub
 * {@code <subscriptions/>} set requests — subscription state changes and
 * channel invitations — covering request validation, authorisation rules,
 * and the outbound notification fan-out.
 */
public class SubscriptionEventTest extends IQTestHandler {
    private IQ request;
    private PubSubElementProcessorAbstract event;
    private Element element;
    private BlockingQueue<Packet> queue = new LinkedBlockingQueue<Packet>();
    private String subscriber = "francisco@denmark.lit";
    private String node = "/user/pamela@denmark.lit/posts";
    private JID jid = new JID("juliet@shakespeare.lit");
    private ChannelManager dataStore;
    @Before
    public void setUp() throws Exception {
        queue = new LinkedBlockingQueue<Packet>();
        // Fix: create the mock before constructing the processor so the
        // processor is never built against a null ChannelManager (previously
        // the mock was created afterwards and the null was only papered over
        // by the setChannelManager() call at the end of setUp).
        dataStore = mock(ChannelManager.class);
        event = new SubscriptionEvent(queue, dataStore);
        request = readStanzaAsIq("/iq/pubsub/subscribe/authorizationPendingGrantReply.stanza");
        element = new BaseElement("subscriptions");
        element.addAttribute("node", node);
        Configuration.getInstance().putProperty(
                Configuration.CONFIGURATION_LOCAL_DOMAIN_CHECKER, Boolean.TRUE.toString());
        // Default stubbing: node exists and the actor is a subscribed member.
        when(dataStore.nodeExists(anyString())).thenReturn(true);
        NodeMembership membership = new NodeMembershipImpl(node, jid,
                Subscriptions.subscribed, Affiliations.member, null);
        when(dataStore.getNodeMembership(anyString(), any(JID.class)))
                .thenReturn(membership);
        ArrayList<NodeSubscription> subscribers = new ArrayList<NodeSubscription>();
        subscribers.add(new NodeSubscriptionMock(new JID(
                "romeo@shakespeare.lit")));
        subscribers.add(new NodeSubscriptionMock(new JID(
                "hamlet@shakespeare.lit")));
        doReturn(new ResultSetImpl<NodeSubscription>(subscribers)).when(
                dataStore).getNodeSubscriptionListeners(anyString());
        event.setChannelManager(dataStore);
    }
    @Test
    public void testPassingSubscriptionsAsElementNameReturnsTrue() {
        Element element = new BaseElement("subscriptions");
        Assert.assertTrue(event.accept(element));
    }
    @Test
    public void testPassingNotSubscriptionsAsElementNameReturnsFalse() {
        Element element = new BaseElement("not-subscriptions");
        Assert.assertFalse(event.accept(element));
    }
    @Test
    public void testNotProvidingNodeAttributeReturnsErrorStanza()
            throws Exception {
        // Element deliberately lacks the 'node' attribute set in setUp().
        BaseElement element = new BaseElement("subscriptions");
        event.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.modify, error.getType());
        Assert.assertEquals("nodeid-required",
                error.getApplicationConditionName());
    }
    @Test
    public void testNotProvidingSubscriptionChildNodeReturnsErrorStanza()
            throws Exception {
        IQ request = toIq(readStanzaAsString(
                "/iq/pubsub/subscribe/authorizationPendingGrantReply.stanza")
                .replaceFirst(
                        "<subscription jid='francisco@denmark.lit' subscription='subscribed'/>",
                        ""));
        event.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.modify, error.getType());
        Assert.assertEquals(PacketError.Condition.bad_request,
                error.getCondition());
    }
    @Test
    public void testNotProvidingJidAttributeReturnsErrorStanza()
            throws Exception {
        IQ request = toIq(readStanzaAsString(
                "/iq/pubsub/subscribe/authorizationPendingGrantReply.stanza")
                .replaceFirst("jid='francisco@denmark.lit'", ""));
        event.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.modify, error.getType());
        Assert.assertEquals(PacketError.Condition.bad_request,
                error.getCondition());
    }
    @Test
    public void testNotProvidingSubscriptionAttributeReturnsErrorStanza()
            throws Exception {
        IQ request = toIq(readStanzaAsString(
                "/iq/pubsub/subscribe/authorizationPendingGrantReply.stanza")
                .replaceFirst("subscription='subscribed'", ""));
        event.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.modify, error.getType());
        Assert.assertEquals(PacketError.Condition.bad_request,
                error.getCondition());
    }
    @SuppressWarnings("unchecked")
    @Test
    public void testNodeStoreExceptionResultsInInternalServerErrorStanza()
            throws Exception {
        when(dataStore.nodeExists(anyString())).thenThrow(
                NodeStoreException.class);
        event.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.wait, error.getType());
        Assert.assertEquals(PacketError.Condition.internal_server_error,
                error.getCondition());
    }
    @Test
    public void testNonExistantNodeRetunsErrorStanza() throws Exception {
        when(dataStore.nodeExists(node)).thenReturn(false);
        event.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.cancel, error.getType());
        Assert.assertEquals(PacketError.Condition.item_not_found,
                error.getCondition());
    }
    @Test
    public void userWithoutSubscriptionReturnsErrorStanza() throws Exception {
        when(dataStore.getNodeMembership(anyString(), any(JID.class)))
                .thenReturn(
                        new NodeMembershipImpl(node, jid, Subscriptions.none,
                                Affiliations.none, null));
        event.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.auth, error.getType());
        Assert.assertEquals(PacketError.Condition.forbidden,
                error.getCondition());
    }
    @Test
    public void userWhoIsntOwnerOrModeratorCantUpdateSubscription()
            throws Exception {
        when(dataStore.getNodeMembership(anyString(), any(JID.class)))
                .thenReturn(
                        new NodeMembershipImpl(node, jid,
                                Subscriptions.subscribed, Affiliations.member,
                                null));
        event.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.auth, error.getType());
        Assert.assertEquals(PacketError.Condition.forbidden,
                error.getCondition());
    }
    @Test
    public void subscribingUserMustHaveExistingSubscriptionToUpdate()
            throws Exception {
        NodeMembership membership = new NodeMembershipImpl(node, new JID(
                subscriber), Subscriptions.none, Affiliations.owner, null);
        when(dataStore.getNodeMembership(anyString(), any(JID.class)))
                .thenReturn(membership);
        NodeMembership inviteeMemebership = new NodeMembershipImpl(node,
                new JID(subscriber), Subscriptions.none, Affiliations.owner,
                null);
        when(dataStore.getNodeMembership(anyString(), eq(jid))).thenReturn(
                inviteeMemebership);
        event.process(element, jid, request, null);
        Packet response = queue.poll();
        PacketError error = response.getError();
        Assert.assertNotNull(error);
        Assert.assertEquals(PacketError.Type.modify, error.getType());
        Assert.assertEquals(PacketError.Condition.unexpected_request,
                error.getCondition());
    }
    @Test
    public void passingInvalidSubscriptionTypeSetsSubscriptionToNone()
            throws Exception {
        IQ request = toIq(readStanzaAsString(
                "/iq/pubsub/subscribe/authorizationPendingGrantReply.stanza")
                .replaceFirst("subscription='subscribed'",
                        "subscription='i-can-haz-all-the-items'"));
        ArgumentCaptor<NodeSubscription> argument = ArgumentCaptor
                .forClass(NodeSubscription.class);
        when(dataStore.getNodeMembership(anyString(), any(JID.class)))
                .thenReturn(
                        new NodeMembershipImpl(node, jid,
                                Subscriptions.subscribed, Affiliations.owner,
                                null));
        ArrayList<NodeSubscription> subscribers = new ArrayList<NodeSubscription>();
        doReturn(new ResultSetImpl<NodeSubscription>(subscribers)).when(
                dataStore).getNodeSubscriptionListeners(anyString());
        event.process(element, jid, request, null);
        verify(dataStore, times(1)).addUserSubscription(argument.capture());
        NodeSubscription subscription = argument.getValue();
        Assert.assertEquals(Subscriptions.none, subscription.getSubscription());
    }
    @Test
    public void passingValidSubscriptionSendsOutExpectedNotifications()
            throws Exception {
        IQ request = toIq(readStanzaAsString(
                "/iq/pubsub/subscribe/authorizationPendingGrantReply.stanza")
                .replaceFirst("subscription='subscribed'",
                        "subscription='subscribed'"));
        NodeMembership membership = new NodeMembershipImpl(node, new JID(
                subscriber), Subscriptions.subscribed, Affiliations.moderator,
                null);
        when(dataStore.getNodeMembership(eq(node), any(JID.class))).thenReturn(
                membership);
        event.process(element, jid, request, null);
        // Expect the IQ result plus notifications to both stubbed listeners
        // and the two admin users.
        Assert.assertEquals(5, queue.size());
        Packet notification = queue.poll();
        Assert.assertEquals(request.getFrom().toString(), notification.getTo()
                .toString());
        notification = queue.poll();
        Assert.assertEquals("romeo@shakespeare.lit", notification.getTo()
                .toString());
        notification = queue.poll();
        Assert.assertEquals("hamlet@shakespeare.lit", notification.getTo()
                .toString());
        notification = queue.poll();
        Assert.assertEquals("user1@server1", notification.getTo().toString());
        notification = queue.poll();
        Assert.assertEquals("user2@server1", notification.getTo().toString());
        Assert.assertEquals(node, notification.getElement().element("event")
                .element("subscription").attributeValue("node"));
        Assert.assertTrue(notification.toXML().contains(
                JabberPubsub.NS_PUBSUB_EVENT));
        Assert.assertEquals(
                "subscribed",
                notification.getElement().element("event")
                        .element("subscription").attributeValue("subscription"));
        Assert.assertEquals(
                subscriber,
                notification.getElement().element("event")
                        .element("subscription").attributeValue("jid"));
    }
    @Test
    public void userCanInviteAnotherUserToNode() throws Exception {
        JID invitee = new JID("francisco@denmark.lit");
        NodeMembership membership = new NodeMembershipImpl(node, invitee,
                Subscriptions.none, Affiliations.none, null);
        when(dataStore.getNodeMembership(eq(node), eq(invitee))).thenReturn(
                membership);
        IQ request = readStanzaAsIq("/iq/pubsub/subscribe/invite.stanza");
        event.process(element, jid, request, null);
        IQ result = (IQ) queue.poll();
        Assert.assertEquals(IQ.Type.result, result.getType());
        Assert.assertEquals(5, queue.size());
        Message notification = (Message) queue.poll();
        Element subscription = notification.getElement().element("event")
                .element("subscription");
        Assert.assertEquals(Subscriptions.invited, Subscriptions
                .valueOf(subscription.attributeValue("subscription")));
        Assert.assertEquals(invitee,
                new JID(subscription.attributeValue("jid")));
    }
    @Test
    public void userCanNotInviteAnotherUserIfTheyDontHaveValidSubscription()
            throws Exception {
        NodeMembership membership = new NodeMembershipImpl(node, jid,
                Subscriptions.pending, Affiliations.member, null);
        when(dataStore.getNodeMembership(anyString(), any(JID.class)))
                .thenReturn(membership);
        IQ request = readStanzaAsIq("/iq/pubsub/subscribe/invite.stanza");
        event.process(element, jid, request, null);
        IQ result = (IQ) queue.poll();
        Assert.assertEquals(IQ.Type.error, result.getType());
        Assert.assertEquals(PacketError.Type.auth, result.getError().getType());
        Assert.assertEquals(PacketError.Condition.forbidden, result.getError()
                .getCondition());
    }
    @Test
    public void invitedByIsSetAsActorJid() throws Exception {
        JID invitee = new JID("francisco@denmark.lit");
        NodeMembership membership = new NodeMembershipImpl(node, invitee,
                Subscriptions.none, Affiliations.none, null);
        when(dataStore.getNodeMembership(eq(node), eq(invitee))).thenReturn(
                membership);
        IQ request = readStanzaAsIq("/iq/pubsub/subscribe/invite.stanza");
        event.process(element, jid, request, null);
        ArgumentCaptor<NodeSubscription> subscription = ArgumentCaptor
                .forClass(NodeSubscription.class);
        verify(dataStore, times(1)).addUserSubscription(subscription.capture());
        Assert.assertEquals(request.getFrom().toBareJID(), subscription
                .getValue().getInvitedBy().toString());
        Assert.assertEquals(Subscriptions.invited, subscription.getValue()
                .getSubscription());
        Assert.assertEquals(node, subscription.getValue().getNodeId());
    }
    @Test
    public void standardSubscribeDoesNotSetInvitedBy() throws Exception {
        NodeMembership membership = new NodeMembershipImpl(node, new JID(
                subscriber), Subscriptions.subscribed, Affiliations.moderator,
                null);
        when(dataStore.getNodeMembership(eq(node), any(JID.class))).thenReturn(
                membership);
        event.process(element, jid, request, null);
        IQ result = (IQ) queue.poll();
        Assert.assertEquals(IQ.Type.result, result.getType());
        ArgumentCaptor<NodeSubscription> subscription = ArgumentCaptor
                .forClass(NodeSubscription.class);
        verify(dataStore, times(1)).addUserSubscription(subscription.capture());
        Assert.assertNull(subscription.getValue().getInvitedBy());
        Assert.assertEquals(Subscriptions.subscribed, subscription.getValue()
                .getSubscription());
        Assert.assertEquals(node, subscription.getValue().getNodeId());
    }
    @Test
    public void sendsNotificationToInvitedUserIfTheyAreLocal() throws Exception {
        JID invitee = new JID("francisco@denmark.lit");
        NodeMembership membership = new NodeMembershipImpl(node, invitee,
                Subscriptions.none, Affiliations.none, null);
        when(dataStore.getNodeMembership(eq(node), eq(invitee))).thenReturn(
                membership);
        IQ request = readStanzaAsIq("/iq/pubsub/subscribe/invite.stanza");
        event.process(element, jid, request, null);
        IQ result = (IQ) queue.poll();
        Assert.assertEquals(IQ.Type.result, result.getType());
        Assert.assertEquals(5, queue.size());
        Message notification = (Message) queue.poll();
        Element subscription = notification.getElement().element("event")
                .element("subscription");
        Assert.assertEquals(Subscriptions.invited, Subscriptions
                .valueOf(subscription.attributeValue("subscription")));
        Assert.assertEquals(invitee,
                new JID(subscription.attributeValue("jid")));
        // Skip the three intermediate listener notifications; the final one
        // goes directly to the (local) invitee.
        queue.poll();
        queue.poll();
        queue.poll();
        Assert.assertEquals(invitee, queue.poll().getTo());
    }
    @Test
    public void sendsNotificationToInvitedUsersServerIfTheyAreNotLocal()
            throws Exception {
        JID invitee = new JID("francisco@denmark.lit");
        // Swap the domain-checker for a fixed local domain so the invitee
        // counts as remote.
        Configuration.getInstance().remove(
                Configuration.CONFIGURATION_LOCAL_DOMAIN_CHECKER);
        Configuration.getInstance().putProperty(
                Configuration.CONFIGURATION_SERVER_DOMAIN, "shakespeare.lit");
        NodeMembership membership = new NodeMembershipImpl(node, invitee,
                Subscriptions.none, Affiliations.none, null);
        when(dataStore.getNodeMembership(eq(node), eq(invitee))).thenReturn(
                membership);
        IQ request = readStanzaAsIq("/iq/pubsub/subscribe/invite.stanza");
        element.addAttribute("node", "/user/pamela@shakespeare.lit/posts");
        event.process(element, jid, request, null);
        IQ result = (IQ) queue.poll();
        Assert.assertEquals(IQ.Type.result, result.getType());
        Assert.assertEquals(5, queue.size());
        Message notification = (Message) queue.poll();
        Element subscription = notification.getElement().element("event")
                .element("subscription");
        Assert.assertEquals(Subscriptions.invited, Subscriptions
                .valueOf(subscription.attributeValue("subscription")));
        Assert.assertEquals(invitee,
                new JID(subscription.attributeValue("jid")));
        Assert.assertEquals(jid,
                new JID(subscription.attributeValue("invited-by")));
        queue.poll();
        queue.poll();
        queue.poll();
        // Remote invitee: the notification is addressed to their server.
        Assert.assertEquals(invitee.getDomain(), queue.poll().getTo()
                .toString());
    }
    @Test
    public void userCanNotModifyOwnSubscription() throws Exception {
        IQ request = this.request.createCopy();
        NodeMembership membership = new NodeMembershipImpl(node, new JID(
                subscriber), Subscriptions.subscribed, Affiliations.moderator,
                null);
        when(dataStore.getNodeMembership(eq(node), any(JID.class))).thenReturn(
                membership);
        // Actor and target subscriber are the same JID.
        event.process(element, new JID("francisco@denmark.lit"), request, null);
        IQ result = (IQ) queue.poll();
        Assert.assertEquals(IQ.Type.error, result.getType());
        PacketError error = result.getError();
        Assert.assertEquals(PacketError.Type.cancel, error.getType());
        Assert.assertEquals(PacketError.Condition.not_allowed,
                error.getCondition());
        Assert.assertEquals(SubscriptionEvent.CAN_NOT_MODIFY_OWN_SUBSCRIPTION,
                error.getApplicationConditionName());
        Assert.assertEquals(Buddycloud.NS_ERROR,
                error.getApplicationConditionNamespaceURI());
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.microsoft.azure.eventhubs.exceptioncontracts;
import com.microsoft.azure.eventhubs.CommunicationException;
import com.microsoft.azure.eventhubs.ConnectionStringBuilder;
import com.microsoft.azure.eventhubs.EventData;
import com.microsoft.azure.eventhubs.EventHubClient;
import com.microsoft.azure.eventhubs.EventPosition;
import com.microsoft.azure.eventhubs.PartitionReceiveHandler;
import com.microsoft.azure.eventhubs.PartitionReceiver;
import com.microsoft.azure.eventhubs.PartitionSender;
import com.microsoft.azure.eventhubs.impl.MessagingFactory;
import com.microsoft.azure.eventhubs.lib.ApiTestBase;
import com.microsoft.azure.eventhubs.lib.FaultInjectingReactorFactory;
import com.microsoft.azure.eventhubs.lib.TestContext;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.time.Duration;
import java.time.Instant;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
 * Contract tests for Event Hubs client open/close behaviour: verifies the
 * caller-supplied executor's task queue is drained on graceful close, and
 * that every client operation invoked after the executor has been shut down
 * fails fast with {@link RejectedExecutionException}.
 */
public class MsgFactoryOpenCloseTest extends ApiTestBase {
    private static final String PARTITION_ID = "0";
    private static ConnectionStringBuilder connStr;
    @BeforeClass
    public static void initialize() {
        connStr = TestContext.getConnectionString();
    }
    @Test()
    public void verifyTaskQueueEmptyOnMsgFactoryGracefulClose() throws Exception {
        final ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
        try {
            final EventHubClient ehClient = EventHubClient.createFromConnectionStringSync(
                    TestContext.getConnectionString().toString(),
                    executor);
            final PartitionReceiver receiver = ehClient.createReceiverSync(
                    TestContext.getConsumerGroupName(), PARTITION_ID, EventPosition.fromEnqueuedTime(Instant.now()));
            final PartitionSender sender = ehClient.createPartitionSenderSync(PARTITION_ID);
            sender.sendSync(EventData.create("test data - string".getBytes()));
            Iterable<EventData> events = receiver.receiveSync(10);
            Assert.assertTrue(events.iterator().hasNext());
            sender.closeSync();
            receiver.closeSync();
            ehClient.closeSync();
            // Fix: assertEquals(expected, actual) — expected value first,
            // matching the usage elsewhere in this class.
            Assert.assertEquals(0, ((ScheduledThreadPoolExecutor) executor).getQueue().size());
        } finally {
            executor.shutdown();
        }
    }
    @Test()
    public void verifyTaskQueueEmptyOnMsgFactoryWithPumpGracefulClose() throws Exception {
        final ScheduledExecutorService executor = new ScheduledThreadPoolExecutor(1);
        try {
            final EventHubClient ehClient = EventHubClient.createFromConnectionStringSync(
                    TestContext.getConnectionString().toString(),
                    executor);
            final PartitionReceiver receiver = ehClient.createReceiverSync(
                    TestContext.getConsumerGroupName(), PARTITION_ID, EventPosition.fromEnqueuedTime(Instant.now()));
            final CompletableFuture<Iterable<EventData>> signalReceive = new CompletableFuture<>();
            receiver.setReceiveHandler(new PartitionReceiveHandler() {
                @Override
                public int getMaxEventCount() {
                    return 10;
                }
                @Override
                public void onReceive(Iterable<EventData> events) {
                    signalReceive.complete(events);
                }
                @Override
                public void onError(Throwable error) {
                }
            }, false);
            final PartitionSender sender = ehClient.createPartitionSenderSync(PARTITION_ID);
            sender.sendSync(EventData.create("test data - string".getBytes()));
            final Iterable<EventData> events = signalReceive.get();
            Assert.assertTrue(events.iterator().hasNext());
            // Detach the pump before closing so no handler callbacks race the close.
            receiver.setReceiveHandler(null).get();
            sender.closeSync();
            receiver.closeSync();
            ehClient.closeSync();
            // Fix: assertEquals(expected, actual) — expected value first.
            Assert.assertEquals(0, ((ScheduledThreadPoolExecutor) executor).getQueue().size());
        } finally {
            executor.shutdown();
        }
    }
    @Test()
    public void verifyThreadReleaseOnMsgFactoryOpenError() throws Exception {
        final FaultInjectingReactorFactory networkOutageSimulator = new FaultInjectingReactorFactory();
        networkOutageSimulator.setFaultType(FaultInjectingReactorFactory.FaultType.NetworkOutage);
        final ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
        try {
            final CompletableFuture<MessagingFactory> openFuture = MessagingFactory.createFromConnectionString(
                    connStr.toString(), null,
                    executor,
                    networkOutageSimulator,
                    null);
            try {
                openFuture.get();
                Assert.fail();
            } catch (ExecutionException error) {
                Assert.assertEquals(CommunicationException.class, error.getCause().getClass());
            }
            // Waiting for reactor to transition from cleanup to complete-stop, this requires at least 60 seconds until
            // the items are emptied.
            Thread.sleep(Duration.ofSeconds(90).toMillis());
            Assert.assertEquals(0, ((ScheduledThreadPoolExecutor) executor).getQueue().size());
        } finally {
            executor.shutdown();
        }
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceToEventHubClient() throws Exception {
        final ScheduledExecutorService testClosed = new ScheduledThreadPoolExecutor(1);
        testClosed.shutdown();
        EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed);
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceToSendOperation() throws Exception {
        final ScheduledExecutorService testClosed = Executors.newScheduledThreadPool(1);
        final EventHubClient temp = EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed);
        temp.sendSync(EventData.create("test data - string".getBytes()));
        testClosed.shutdown();
        temp.sendSync(EventData.create("test data - string".getBytes()));
        testClosed.awaitTermination(60, TimeUnit.SECONDS);
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceToReceiveOperation() throws Exception {
        final ScheduledExecutorService testClosed = new ScheduledThreadPoolExecutor(1);
        final PartitionReceiver temp = EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed)
                .createReceiverSync(TestContext.getConsumerGroupName(), PARTITION_ID, EventPosition.fromEndOfStream());
        testClosed.shutdown();
        testClosed.awaitTermination(60, TimeUnit.SECONDS);
        temp.receiveSync(20);
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceToCreateLinkOperation() throws Exception {
        final ScheduledExecutorService testClosed = Executors.newScheduledThreadPool(1);
        final EventHubClient temp = EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed);
        testClosed.shutdown();
        testClosed.awaitTermination(60, TimeUnit.SECONDS);
        // first send creates send link
        temp.sendSync(EventData.create("test data - string".getBytes()));
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceToCreateSenderOperation() throws Exception {
        final ScheduledExecutorService testClosed = new ScheduledThreadPoolExecutor(1);
        final EventHubClient temp = EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed);
        testClosed.shutdown();
        testClosed.awaitTermination(60, TimeUnit.SECONDS);
        temp.createPartitionSenderSync(PARTITION_ID);
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceToCreateReceiverOperation() throws Exception {
        final ScheduledExecutorService testClosed = Executors.newScheduledThreadPool(1);
        final EventHubClient temp = EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed);
        testClosed.shutdown();
        testClosed.awaitTermination(60, TimeUnit.SECONDS);
        temp.createReceiverSync(TestContext.getConsumerGroupName(), PARTITION_ID, EventPosition.fromEndOfStream());
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceThenMgmtOperation() throws Throwable {
        final ScheduledThreadPoolExecutor testClosed = new ScheduledThreadPoolExecutor(1);
        final EventHubClient temp = EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed);
        testClosed.shutdown();
        testClosed.awaitTermination(60, TimeUnit.SECONDS);
        try {
            temp.getPartitionRuntimeInformation(PARTITION_ID).get();
        } catch (ExecutionException ex) {
            // Unwrap so @Test(expected = ...) sees the real cause.
            throw ex.getCause();
        }
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceThenFactoryCloseOperation() throws Exception {
        final ScheduledExecutorService testClosed = Executors.newScheduledThreadPool(1);
        final EventHubClient temp = EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed);
        testClosed.shutdown();
        testClosed.awaitTermination(60, TimeUnit.SECONDS);
        temp.closeSync();
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceThenSenderCloseOperation() throws Exception {
        final ScheduledThreadPoolExecutor testClosed = new ScheduledThreadPoolExecutor(1);
        final PartitionSender temp = EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed).createPartitionSenderSync(PARTITION_ID);
        testClosed.shutdown();
        testClosed.awaitTermination(60, TimeUnit.SECONDS);
        temp.closeSync();
    }
    @Test(expected = RejectedExecutionException.class)
    public void supplyClosedExecutorServiceThenReceiverCloseOperation() throws Exception {
        final ScheduledExecutorService testClosed = Executors.newScheduledThreadPool(1);
        final PartitionReceiver temp = EventHubClient.createFromConnectionStringSync(
                TestContext.getConnectionString().toString(),
                testClosed).createReceiverSync(TestContext.getConsumerGroupName(), PARTITION_ID, EventPosition.fromEndOfStream());
        testClosed.shutdown();
        testClosed.awaitTermination(60, TimeUnit.SECONDS);
        temp.closeSync();
    }
    @Test(expected = RejectedExecutionException.class)
    public void testEventHubClientSendAfterClose() throws Exception {
        final ConnectionStringBuilder connectionString = TestContext.getConnectionString();
        final EventHubClient eventHubClient = EventHubClient.createFromConnectionStringSync(connectionString.toString(), TestContext.EXECUTOR_SERVICE);
        eventHubClient.closeSync();
        eventHubClient.sendSync(EventData.create("test message".getBytes()));
    }
    @Test(expected = IllegalStateException.class)
    public void testEventHubClientSendCloseAfterSomeSends() throws Exception {
        final ConnectionStringBuilder connectionString = TestContext.getConnectionString();
        final EventHubClient eventHubClient = EventHubClient.createFromConnectionStringSync(connectionString.toString(), TestContext.EXECUTOR_SERVICE);
        eventHubClient.sendSync(EventData.create("test message".getBytes()));
        eventHubClient.closeSync();
        eventHubClient.sendSync(EventData.create("test message".getBytes()));
    }
}
| |
/*
* Copyright 2011-2013 Tyler Blair. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice, this list
* of conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation are those of the
* authors and contributors and should not be interpreted as representing official policies,
* either expressed or implied, of anybody else.
*/
package net.lankylord.dontswear;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.Proxy;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.UUID;
import java.util.logging.Level;
import org.bukkit.Bukkit;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.PluginDescriptionFile;
import org.bukkit.scheduler.BukkitTask;
public final class MetricsLite {
    /**
     * The current revision number of the metrics protocol
     */
    private final static int REVISION = 6;
    /**
     * The base url of the metrics domain
     */
    private static final String BASE_URL = "http://mcstats.org";
    /**
     * The url used to report a server's status
     */
    private static final String REPORT_URL = "/report/%s";
    /**
     * Interval of time to ping (in minutes)
     */
    private final static int PING_INTERVAL = 10;
    /**
     * The plugin this metrics submits for
     */
    private final Plugin plugin;
    /**
     * The loaded metrics configuration (opt-out flag, guid, debug flag)
     */
    private final YamlConfiguration configuration;
    /**
     * The file on disk backing the metrics configuration
     */
    private final File configurationFile;
    /**
     * Unique server id
     */
    private final String guid;
    /**
     * Debug mode
     */
    private final boolean debug;
    /**
     * Lock guarding opt-out checks and task start/cancel
     */
    private final Object optOutLock = new Object();
    /**
     * Id of the scheduled task (null while metrics are not running)
     */
    private volatile BukkitTask task = null;
    /**
     * Creates a metrics submitter for the given plugin, loading — and on
     * first run creating — the shared plugin-metrics configuration file.
     *
     * @param plugin the plugin to submit statistics for; must not be null
     * @throws IOException if the configuration file cannot be written
     */
    public MetricsLite(Plugin plugin) throws IOException {
        if (plugin == null) {
            throw new IllegalArgumentException("Plugin cannot be null");
        }
        this.plugin = plugin;
        // load the config
        configurationFile = getConfigFile();
        configuration = YamlConfiguration.loadConfiguration(configurationFile);
        // add some defaults
        configuration.addDefault("opt-out", false);
        configuration.addDefault("guid", UUID.randomUUID().toString());
        configuration.addDefault("debug", false);
        // Do we need to create the file? A missing guid means first run, so
        // persist the defaults (including the freshly generated guid).
        if (configuration.get("guid", null) == null) {
            configuration.options().header("http://mcstats.org").copyDefaults(true);
            configuration.save(configurationFile);
        }
        // Load the guid then
        guid = configuration.getString("guid");
        debug = configuration.getBoolean("debug", false);
    }
/**
* Start measuring statistics. This will immediately create an async
* repeating task as the plugin and send the initial data to the metrics
* backend, and then after that it will post in increments of PING_INTERVAL
* * 1200 ticks.
*
* @return True if statistics measuring is running, otherwise false.
*/
public boolean start() {
synchronized (optOutLock) {
// Did we opt out?
if (isOptOut()) {
return false;
}
// Is metrics already running?
if (task != null) {
return true;
}
// Begin hitting the server with glorious data
task = plugin.getServer().getScheduler().runTaskTimerAsynchronously(plugin, new Runnable() {
private boolean firstPost = true;
@Override
public void run() {
try {
// This has to be synchronized or it can collide with the disable method.
synchronized (optOutLock) {
// Disable Task, if it is running and the server owner decided to opt-out
if (isOptOut() && task != null) {
task.cancel();
task = null;
}
}
// We use the inverse of firstPost because if it is the first time we are posting,
// it is not a interval ping, so it evaluates to FALSE
// Each time thereafter it will evaluate to TRUE, i.e PING!
postPlugin(!firstPost);
// After the first post we set firstPost to false
// Each post thereafter will be a ping
firstPost = false;
} catch (IOException e) {
if (debug) {
Bukkit.getLogger().log(Level.INFO, "[Metrics] {0}", e.getMessage());
}
}
}
}, 0, PING_INTERVAL * 1200);
return true;
}
}
/**
* Has the server owner denied plugin metrics?
*
* @return true if metrics should be opted out of it
*/
public boolean isOptOut() {
synchronized (optOutLock) {
try {
// Reload the metrics file
configuration.load(getConfigFile());
} catch (IOException | InvalidConfigurationException ex) {
if (debug) {
Bukkit.getLogger().log(Level.INFO, "[Metrics] {0}", ex.getMessage());
}
return true;
}
return configuration.getBoolean("opt-out", false);
}
}
/**
* Enables metrics for the server by setting "opt-out" to false in the
* config file and starting the metrics task.
*
* @throws java.io.IOException
*/
public void enable() throws IOException {
// This has to be synchronized or it can collide with the check in the task.
synchronized (optOutLock) {
// Check if the server owner has already set opt-out, if not, set it.
if (isOptOut()) {
configuration.set("opt-out", false);
configuration.save(configurationFile);
}
// Enable Task, if it is not running
if (task == null) {
start();
}
}
}
/**
* Disables metrics for the server by setting "opt-out" to true in the
* config file and canceling the metrics task.
*
* @throws java.io.IOException
*/
public void disable() throws IOException {
// This has to be synchronized or it can collide with the check in the task.
synchronized (optOutLock) {
// Check if the server owner has already set opt-out, if not, set it.
if (!isOptOut()) {
configuration.set("opt-out", true);
configuration.save(configurationFile);
}
// Disable Task, if it is running
if (task != null) {
task.cancel();
task = null;
}
}
}
/**
* Gets the File object of the config file that should be used to store data
* such as the GUID and opt-out status
*
* @return the File object for the config file
*/
public File getConfigFile() {
// I believe the easiest way to get the base folder (e.g craftbukkit set via -P) for plugins to use
// is to abuse the plugin object we already have
// plugin.getDataFolder() => base/plugins/PluginA/
// pluginsFolder => base/plugins/
// The base is not necessarily relative to the startup directory.
File pluginsFolder = plugin.getDataFolder().getParentFile();
// return => base/plugins/PluginMetrics/config.yml
return new File(new File(pluginsFolder, "PluginMetrics"), "config.yml");
}
/**
* Generic method that posts a plugin to the metrics website
*/
private void postPlugin(boolean isPing) throws IOException {
// Server software specific section
PluginDescriptionFile description = plugin.getDescription();
String pluginName = description.getName();
boolean onlineMode = Bukkit.getServer().getOnlineMode(); // TRUE if online mode is enabled
String pluginVersion = description.getVersion();
String serverVersion = Bukkit.getVersion();
int playersOnline = Bukkit.getServer().getOnlinePlayers().length;
// END server software specific section -- all code below does not use any code outside of this class / Java
// Construct the post data
final StringBuilder data = new StringBuilder();
// The plugin's description file containg all of the plugin data such as name, version, author, etc
data.append(encode("guid")).append('=').append(encode(guid));
encodeDataPair(data, "version", pluginVersion);
encodeDataPair(data, "server", serverVersion);
encodeDataPair(data, "players", Integer.toString(playersOnline));
encodeDataPair(data, "revision", String.valueOf(REVISION));
// New data as of R6
String osname = System.getProperty("os.name");
String osarch = System.getProperty("os.arch");
String osversion = System.getProperty("os.version");
String java_version = System.getProperty("java.version");
int coreCount = Runtime.getRuntime().availableProcessors();
// normalize os arch .. amd64 -> x86_64
if (osarch.equals("amd64")) {
osarch = "x86_64";
}
encodeDataPair(data, "osname", osname);
encodeDataPair(data, "osarch", osarch);
encodeDataPair(data, "osversion", osversion);
encodeDataPair(data, "cores", Integer.toString(coreCount));
encodeDataPair(data, "online-mode", Boolean.toString(onlineMode));
encodeDataPair(data, "java_version", java_version);
// If we're pinging, append it
if (isPing) {
encodeDataPair(data, "ping", "true");
}
// Create the url
URL url = new URL(BASE_URL + String.format(REPORT_URL, encode(pluginName)));
// Connect to the website
URLConnection connection;
// Mineshafter creates a socks proxy, so we can safely bypass it
// It does not reroute POST requests so we need to go around it
if (isMineshafterPresent()) {
connection = url.openConnection(Proxy.NO_PROXY);
} else {
connection = url.openConnection();
}
connection.setDoOutput(true);
final BufferedReader reader;
final String response;
try (OutputStreamWriter writer = new OutputStreamWriter(connection.getOutputStream())) {
writer.write(data.toString());
writer.flush();
reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
response = reader.readLine();
}
reader.close();
if (response == null || response.startsWith("ERR")) {
throw new IOException(response); //Throw the exception
}
}
/**
* Check if mineshafter is present. If it is, we need to bypass it to send
* POST requests
*
* @return true if mineshafter is installed on the server
*/
private boolean isMineshafterPresent() {
try {
Class.forName("mineshafter.MineServer");
return true;
} catch (Exception e) {
return false;
}
}
/**
* <p>Encode a key/value data pair to be used in a HTTP post request. This
* INCLUDES a & so the first key/value pair MUST be included manually,
* e.g:</p>
* <code>
* StringBuffer data = new StringBuffer();
* data.append(encode("guid")).append('=').append(encode(guid));
* encodeDataPair(data, "version", description.getVersion());
* </code>
*
* @param buffer the stringbuilder to append the data pair onto
* @param key the key value
* @param value the value
*/
private static void encodeDataPair(final StringBuilder buffer, final String key, final String value) throws UnsupportedEncodingException {
buffer.append('&').append(encode(key)).append('=').append(encode(value));
}
/**
* Encode text as UTF-8
*
* @param text the text to encode
* @return the encoded text, as UTF-8
*/
private static String encode(final String text) throws UnsupportedEncodingException {
return URLEncoder.encode(text, "UTF-8");
}
}
| |
/*
* (c) Copyright Christian P. Fries, Germany. Contact: email@christian-fries.de.
*
* Created on 21.01.2004
*/
package net.finmath.montecarlo.assetderivativevaluation.products;
import java.util.ArrayList;
import net.finmath.exception.CalculationException;
import net.finmath.functions.AnalyticFormulas;
import net.finmath.montecarlo.RandomVariableFromDoubleArray;
import net.finmath.montecarlo.assetderivativevaluation.AssetModelMonteCarloSimulationModel;
import net.finmath.montecarlo.assetderivativevaluation.MonteCarloBlackScholesModel;
import net.finmath.montecarlo.conditionalexpectation.MonteCarloConditionalExpectationRegression;
import net.finmath.montecarlo.process.component.barrier.Barrier;
import net.finmath.stochastic.RandomVariable;
/**
* Implements pricing of a European stock option.
*
* @author Christian Fries
* @version 1.2
* @since finmath-lib 4.1.0
*/
public class EuropeanOptionWithBoundary extends AbstractAssetMonteCarloProduct {

	/** Selects how the value lost beyond the simulated domain boundary is approximated. */
	enum BoundaryAdjustmentType {
		LINEAR_ANALYTIC,
		LINEAR_PROPAGATED,
		SIMPLE_SUPERHEDGE,
		SIMPLE_SUBHEDGE,
		LINEAR_REGRESSED
	}

	// Boundary placement parameter; used as boundary * 0.25 * sqrt(t) in the exponent
	// of the boundary level (see getBoundaryAdjustment and ConstantBarrier).
	private final double boundary = 4;
	// If true, the boundary level is evaluated at the current simulation time
	// rather than fixed at maturity.
	private final boolean isBoundaryTimeDep = true;
	private final BoundaryAdjustmentType boundaryAdjustmentType = BoundaryAdjustmentType.LINEAR_REGRESSED;

	private final double maturity;
	private final double strike;

	/**
	 * Create an European option.
	 *
	 * @param maturity The maturity of the European option.
	 * @param strike The strike of the European option.
	 */
	public EuropeanOptionWithBoundary(final double maturity, final double strike) {
		super();
		this.maturity = maturity;
		this.strike = strike;
	}

	/**
	 * This method returns the value random variable of the product within the specified model, evaluated at a given evalutationTime.
	 * Note: For a lattice this is often the value conditional to evalutationTime, for a Monte-Carlo simulation this is the (sum of) value discounted to evaluation time.
	 * Cashflows prior evaluationTime are not considered.
	 *
	 * @param evaluationTime The time on which this products value should be observed.
	 * @param model The model used to price the product.
	 * @return The random variable representing the value of the product discounted to evaluation time
	 * @throws net.finmath.exception.CalculationException Thrown if the valuation fails, specific cause may be available via the <code>cause()</code> method.
	 */
	@Override
	public RandomVariable getValue(final double evaluationTime, final AssetModelMonteCarloSimulationModel model) throws CalculationException {
		// Get underlying and numeraire
		final RandomVariable underlyingAtMaturity = model.getAssetValue(maturity,0);
		// The payoff: max(S(T) - K, 0)
		final RandomVariable values = underlyingAtMaturity.sub(strike).floor(0.0);
		// Discounting...
		final RandomVariable numeraireAtMaturity = model.getNumeraire(maturity);
		final RandomVariable monteCarloWeights = model.getMonteCarloWeights(maturity);
		// NOTE(review): the results of the following operation chains are discarded. If
		// RandomVariable arithmetic returns new objects (immutable style, as elsewhere in
		// finmath-lib), the discounting below has no effect on 'values' -- confirm whether
		// 'values = values.div(...)...' was intended.
		values.div(numeraireAtMaturity).mult(monteCarloWeights);
		// ...to evaluation time.
		final RandomVariable numeraireAtZero = model.getNumeraire(evaluationTime);
		final RandomVariable monteCarloProbabilitiesAtZero = model.getMonteCarloWeights(evaluationTime);
		values.mult(numeraireAtZero).div(monteCarloProbabilitiesAtZero);
		// NOTE(review): 'prob' is computed below but never used, and the results of these
		// calls are likewise discarded -- dead code or missing reassignment? verify.
		final RandomVariableFromDoubleArray prob = new RandomVariableFromDoubleArray(0.0,1.0);
		prob.mult(monteCarloWeights).div(monteCarloProbabilitiesAtZero);
		prob.sub(1.0);
		prob.mult(-1.0);
		// NOTE(review): add(...) result is also discarded; with immutable RandomVariables
		// the boundary adjustment would not be included in the returned value -- confirm.
		values.add(getBoundaryAdjustment(evaluationTime, maturity, model, values));
		return values;
	}

	/**
	 * Calculates the adjustment for value lost beyond the (artificial) upper boundary
	 * of the simulated domain, recursing one simulation time step at a time from
	 * fromTime towards toTime and approximating the continuation value at the
	 * boundary as a linear function c + d * S (coefficients chosen according to
	 * {@link BoundaryAdjustmentType}).
	 *
	 * @param fromTime The start time of the current step.
	 * @param toTime The final time (option maturity within getValue).
	 * @param model The simulation model; must be a MonteCarloBlackScholesModel (cast below).
	 * @param continuationValues Path-wise continuation values used by the regression variant.
	 * @return The boundary adjustment as a random variable.
	 * @throws net.finmath.exception.CalculationException Thrown if the valuation fails, specific cause may be available via the <code>cause()</code> method.
	 */
	public RandomVariable getBoundaryAdjustment(final double fromTime, final double toTime, final AssetModelMonteCarloSimulationModel model, final RandomVariable continuationValues) throws CalculationException {
		final RandomVariableFromDoubleArray values = new RandomVariableFromDoubleArray(0,0);
		final int fromTimeIndex = model.getTimeIndex(fromTime);
		final double fromTimeNext = model.getTime(fromTimeIndex+1);
		if(fromTimeNext < toTime) {
			// Recurse over the remaining time steps; the transition weights re-weight the
			// result from the next time step back to the current one.
			final RandomVariable monteCarloProbabilitiesEnd = model.getMonteCarloWeights(fromTimeNext);
			final RandomVariable monteCarloProbabilitiesStart = model.getMonteCarloWeights(fromTime);
			final RandomVariable monteCarloProbabilitiesTransition = monteCarloProbabilitiesEnd.div(monteCarloProbabilitiesStart);
			final double riskFreeRate = ((MonteCarloBlackScholesModel)model).getModel().getRiskFreeRate().doubleValue();
			final RandomVariable remainingBoundaryAdjustment = this.getBoundaryAdjustment(fromTimeNext, toTime, model, continuationValues);
			// NOTE(review): the results of mult(...) and add(...) below are discarded; if
			// RandomVariable operations are immutable these calls have no effect on
			// 'values' -- confirm whether reassignment was intended.
			remainingBoundaryAdjustment.mult(monteCarloProbabilitiesTransition).mult(Math.exp(-riskFreeRate*(fromTimeNext-fromTime)));
			values.add(remainingBoundaryAdjustment);
		}
		final MonteCarloBlackScholesModel modelBlackScholes = (MonteCarloBlackScholesModel)model;
		final double spot = modelBlackScholes.getModel().getInitialState(modelBlackScholes.getProcess())[0].doubleValue();
		final double riskFreeRate = modelBlackScholes.getModel().getRiskFreeRate().doubleValue();
		final double volatility = modelBlackScholes.getModel().getVolatility().doubleValue();
		// Boundary level: forward of the spot shifted up by boundary * 0.25 * sqrt(t)
		// (evaluated at maturity, or at the next time step if time dependent).
		double boundaryLocal = spot*Math.exp(riskFreeRate*maturity + boundary * 0.25 * Math.sqrt(maturity));
		//		boundaryLocal = boundary;
		if(isBoundaryTimeDep) {
			boundaryLocal = spot*Math.exp(riskFreeRate*fromTimeNext + boundary * 0.25 * Math.sqrt(fromTimeNext));
			//			double boundaryLocal = 1*Math.exp(riskFreeRate*fromTimeNext - 0.5 * volatility * volatility*fromTimeNext + boundary * Math.sqrt(fromTimeNext));
			//			double boundaryLocal = 1*Math.exp(riskFreeRate*maturity + boundary * Math.sqrt(fromTimeNext));
		}
		// Boundary adjustment for one time step
		final RandomVariable underlying = model.getAssetValue(fromTime,0);
		final double optionMaturity = fromTimeNext-fromTime;
		final double optionStrike = boundaryLocal;
		final double[] boundaryAdjustmentValues = new double[underlying.size()];
		// Linear approximation of the continuation value at the boundary: c + d * S.
		double c = 0;
		double d = 0;
		if(boundaryAdjustmentType == BoundaryAdjustmentType.LINEAR_ANALYTIC) {
			// Tangent of the analytic Black-Scholes value at the boundary level.
			c = AnalyticFormulas.blackScholesOptionValue(boundaryLocal, riskFreeRate, volatility, toTime-fromTimeNext, strike);
			d = AnalyticFormulas.blackScholesOptionDelta(boundaryLocal, riskFreeRate, volatility, toTime-fromTimeNext, strike);
		}
		else if(boundaryAdjustmentType == BoundaryAdjustmentType.LINEAR_PROPAGATED) {
			c = Math.exp(-riskFreeRate * (fromTimeNext-fromTime)) * boundaryLocal;
			d = Math.exp(-riskFreeRate * (toTime-fromTimeNext));
		}
		else if(boundaryAdjustmentType == BoundaryAdjustmentType.LINEAR_REGRESSED) {
			final RandomVariable weight = new RandomVariableFromDoubleArray(1.0);
			final MonteCarloConditionalExpectationRegression condExpEstimator = new MonteCarloConditionalExpectationRegression(getRegressionBasisFunctions(toTime, model, weight));
			// Calculate cond. expectation. Note that no discounting (numeraire division) is required!
			final double[] paremetersRegressed = condExpEstimator.getLinearRegressionParameters(continuationValues.mult(weight));
			c = paremetersRegressed[0] + boundaryLocal * paremetersRegressed[1];
			d = paremetersRegressed[1];
		}
		else if(boundaryAdjustmentType == BoundaryAdjustmentType.SIMPLE_SUPERHEDGE) {
			c = boundaryLocal;
			d = 1;
		}
		else if(boundaryAdjustmentType == BoundaryAdjustmentType.SIMPLE_SUBHEDGE) {
			c = boundaryLocal-strike;//*Math.exp(-riskFreeRate * (toTime-fromTimeNext));
			d = 1;
		}
		// Path-wise price of the linear payoff beyond the boundary: c pays like a
		// digital, d like a vanilla call, both struck at the boundary level.
		for(int i=0; i<underlying.size(); i++) {
			final double initialStockValue = underlying.get(i);
			final double a = AnalyticFormulas.blackScholesOptionValue(initialStockValue, riskFreeRate, volatility, optionMaturity, optionStrike);
			final double b = AnalyticFormulas.blackScholesDigitalOptionValue(initialStockValue, riskFreeRate, volatility, optionMaturity, optionStrike);
			boundaryAdjustmentValues[i] = c * b + d * a;
		}
		// A single value collapses to a deterministic random variable.
		final RandomVariableFromDoubleArray boundaryAdjustment = boundaryAdjustmentValues.length == 1 ? new RandomVariableFromDoubleArray(0.0, boundaryAdjustmentValues[0]) : new RandomVariableFromDoubleArray(0.0, boundaryAdjustmentValues);
		// NOTE(review): add(...) result discarded here as well -- see notes above; verify.
		values.add(boundaryAdjustment);
		return values;
	}

	/**
	 * Return the regression basis functions suitable for this product.
	 *
	 * @param exerciseDate The exercise date.
	 * @param model The model to use.
	 * @param weight A weight to apply per path (a random variable)
	 * @return Vector of regression basis functions (vector of random variables).
	 * @throws net.finmath.exception.CalculationException Thrown if the valuation fails, specific cause may be available via the <code>cause()</code> method.
	 */
	private RandomVariable[] getRegressionBasisFunctions(final double exerciseDate, final AssetModelMonteCarloSimulationModel model, final RandomVariable weight) throws CalculationException {
		final ArrayList<RandomVariable> basisFunctions = new ArrayList<>();
		RandomVariable basisFunction;
		// Constant
		basisFunction = new RandomVariableFromDoubleArray(exerciseDate, 1.0);
		basisFunctions.add(basisFunction.mult(weight));
		// Underlying
		basisFunction = model.getAssetValue(exerciseDate, 0);
		basisFunctions.add(basisFunction.mult(weight));
		return basisFunctions.toArray(new RandomVariable[0]);
	}

	/**
	 * A barrier at the (time dependent) boundary level used by this product,
	 * expressed in log-coordinates relative to the previous time step's realization.
	 *
	 * @author Christian Fries
	 *
	 */
	public class ConstantBarrier implements Barrier {

		private final AssetModelMonteCarloSimulationModel scheme;

		public ConstantBarrier(final AssetModelMonteCarloSimulationModel scheme) {
			super();
			this.scheme = scheme;
		}

		@Override
		public RandomVariableFromDoubleArray[] getBarrierDirection(final int timeIndex, final RandomVariable[] realizationPredictor) {
			// No barrier beyond the last simulation time.
			if(timeIndex >= scheme.getTimeDiscretization().getNumberOfTimeSteps()+1) {
				return null;
			}
			// Direction is the constant unit vector (single factor).
			final RandomVariableFromDoubleArray[] barrierDirection = new RandomVariableFromDoubleArray[1];
			barrierDirection[0] = new RandomVariableFromDoubleArray(0.0, 1.0);
			return barrierDirection;
		}

		@Override
		public RandomVariableFromDoubleArray getBarrierLevel(final int timeIndex, final RandomVariable[] realizationPredictor) throws CalculationException {
			// No barrier beyond the last simulation time.
			if(timeIndex >= scheme.getTimeDiscretization().getNumberOfTimeSteps()+1) {
				return null;
			}
			final double simulationTime = scheme.getTime(timeIndex);
			final double riskFreeRate = ((MonteCarloBlackScholesModel)scheme).getModel().getRiskFreeRate().doubleValue();
			final double volatility = ((MonteCarloBlackScholesModel)scheme).getModel().getVolatility().doubleValue();
			// Same boundary-level formula as in getBoundaryAdjustment, but relative to spot 1.
			double boundaryLocal = 1*Math.exp(riskFreeRate*maturity + boundary * 0.25 * Math.sqrt(maturity));
			if(isBoundaryTimeDep) {
				boundaryLocal = 1*Math.exp(riskFreeRate*simulationTime + boundary * 0.25 * Math.sqrt(simulationTime));
			}
			//		double boundaryLocal = 1*Math.exp(riskFreeRate*simulationTime - 0.5 * volatility * volatility*simulationTime + boundary * volatility * Math.sqrt(simulationTime));
			//		double boundaryLocal = 1*Math.exp(riskFreeRate*maturity + boundary * Math.sqrt(simulationTime));
			final RandomVariableFromDoubleArray barrierLevel = new RandomVariableFromDoubleArray(simulationTime, Math.log(boundaryLocal));
			final RandomVariable underlying = scheme.getAssetValue(timeIndex-1, 0);
			// NOTE(review): the results of the sub(...) calls below are discarded; if
			// RandomVariable arithmetic is immutable, the barrier level returned is just
			// log(boundaryLocal) without the drift/realization correction -- confirm.
			barrierLevel.sub(underlying.log());
			//		barrierLevel.sub((riskFreeRate)*scheme.getTimeDiscretization().getTimeStep(timeIndex-1));
			barrierLevel.sub((riskFreeRate-0.5*volatility*volatility)*scheme.getTimeDiscretization().getTimeStep(timeIndex-1));
			return barrierLevel;
		}

		/* (non-Javadoc)
		 * @see net.finmath.montecarlo.BarrierInterface#isUpperBarrier()
		 */
		@Override
		public boolean isUpperBarrier() {
			return true;
		}
	}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.type;
import com.facebook.presto.operator.scalar.AbstractTestFunctions;
import com.facebook.presto.spi.type.SqlTime;
import com.facebook.presto.spi.type.SqlTimeWithTimeZone;
import com.facebook.presto.spi.type.SqlTimestamp;
import com.facebook.presto.spi.type.SqlTimestampWithTimeZone;
import com.facebook.presto.spi.type.TimeZoneKey;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.testng.annotations.Test;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.TimeType.TIME;
import static com.facebook.presto.spi.type.TimeWithTimeZoneType.TIME_WITH_TIME_ZONE;
import static com.facebook.presto.spi.type.TimeZoneKey.getTimeZoneKey;
import static com.facebook.presto.spi.type.TimeZoneKey.getTimeZoneKeyForOffset;
import static com.facebook.presto.spi.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.spi.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.testing.TestingSession.testSessionBuilder;
import static com.facebook.presto.type.IntervalDayTimeType.INTERVAL_DAY_TIME;
public abstract class TestTimeWithTimeZoneBase
        extends AbstractTestFunctions
{
    // A deliberately odd offset (+07:09) so that time-zone conversion bugs cannot
    // cancel out against round hour/half-hour offsets.
    private static final DateTimeZone WEIRD_ZONE = DateTimeZone.forOffsetHoursMinutes(7, 9);
    private static final TimeZoneKey WEIRD_TIME_ZONE_KEY = getTimeZoneKeyForOffset(7 * 60 + 9);

    // The session zone (+06:09) is exactly one hour west of WEIRD_ZONE, so a time
    // without an explicit zone (e.g. TIME '02:04:05.321') lines up with the same
    // instant as '03:04:05.321 +07:09' in the assertions below.
    protected TestTimeWithTimeZoneBase(boolean legacyTimestamp)
    {
        super(testSessionBuilder()
                .setSystemProperty("legacy_timestamp", String.valueOf(legacyTimestamp))
                .setTimeZoneKey(getTimeZoneKey("+06:09"))
                .build());
    }

    // Literals with and without milliseconds/padding all parse to the same instant/zone.
    @Test
    public void testLiteral()
    {
        assertFunction("TIME '03:04:05.321 +07:09'",
                TIME_WITH_TIME_ZONE,
                new SqlTimeWithTimeZone(new DateTime(1970, 1, 1, 3, 4, 5, 321, WEIRD_ZONE).getMillis(), WEIRD_TIME_ZONE_KEY));
        assertFunction("TIME '03:04:05 +07:09'",
                TIME_WITH_TIME_ZONE,
                new SqlTimeWithTimeZone(new DateTime(1970, 1, 1, 3, 4, 5, 0, WEIRD_ZONE).getMillis(), WEIRD_TIME_ZONE_KEY));
        assertFunction("TIME '03:04 +07:09'",
                TIME_WITH_TIME_ZONE,
                new SqlTimeWithTimeZone(new DateTime(1970, 1, 1, 3, 4, 0, 0, WEIRD_ZONE).getMillis(), WEIRD_TIME_ZONE_KEY));
        assertFunction("TIME '3:4:5.321+07:09'",
                TIME_WITH_TIME_ZONE,
                new SqlTimeWithTimeZone(new DateTime(1970, 1, 1, 3, 4, 5, 321, WEIRD_ZONE).getMillis(), WEIRD_TIME_ZONE_KEY));
        assertFunction("TIME '3:4:5+07:09'",
                TIME_WITH_TIME_ZONE,
                new SqlTimeWithTimeZone(new DateTime(1970, 1, 1, 3, 4, 5, 0, WEIRD_ZONE).getMillis(), WEIRD_TIME_ZONE_KEY));
        assertFunction("TIME '3:4+07:09'",
                TIME_WITH_TIME_ZONE,
                new SqlTimeWithTimeZone(new DateTime(1970, 1, 1, 3, 4, 0, 0, WEIRD_ZONE).getMillis(), WEIRD_TIME_ZONE_KEY));
    }

    // NOTE(review): method name has a typo ("Substract"); kept as-is because TestNG
    // discovers it via the @Test annotation, so the name is not load-bearing.
    @Test
    public void testSubstract()
    {
        functionAssertions.assertFunctionString("TIME '14:15:16.432 +07:09' - TIME '03:04:05.321 +08:09'",
                INTERVAL_DAY_TIME,
                "0 12:11:11.111");
        functionAssertions.assertFunctionString("TIME '03:04:05.321 +08:09' - TIME '14:15:16.432 +07:09'",
                INTERVAL_DAY_TIME,
                "-0 12:11:11.111");
    }

    // Equality compares instants, so the same instant expressed in different zones is equal.
    @Test
    public void testEqual()
    {
        assertFunction("TIME '03:04:05.321 +07:09' = TIME '03:04:05.321 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' = TIME '02:04:05.321 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' = TIME '02:04:05.321'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' = TIME '03:04:05.333 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' = TIME '02:04:05.333 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' = TIME '02:04:05.333'", BOOLEAN, false);
    }

    @Test
    public void testNotEqual()
    {
        assertFunction("TIME '03:04:05.321 +07:09' <> TIME '03:04:05.333 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' <> TIME '02:04:05.333 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' <> TIME '02:04:05.333'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' <> TIME '03:04:05.321 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' <> TIME '02:04:05.321 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' <> TIME '02:04:05.321'", BOOLEAN, false);
    }

    @Test
    public void testLessThan()
    {
        assertFunction("TIME '03:04:05.321 +07:09' < TIME '03:04:05.333 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' < TIME '02:04:05.333 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' < TIME '02:04:05.333'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' < TIME '03:04:05.321 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' < TIME '02:04:05.321 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' < TIME '02:04:05.321'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' < TIME '03:04:05 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' < TIME '02:04:05 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' < TIME '02:04:05'", BOOLEAN, false);
    }

    @Test
    public void testLessThanOrEqual()
    {
        assertFunction("TIME '03:04:05.321 +07:09' <= TIME '03:04:05.333 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' <= TIME '02:04:05.333 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' <= TIME '02:04:05.333'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' <= TIME '03:04:05.321 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' <= TIME '02:04:05.321 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' <= TIME '02:04:05.321'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' <= TIME '03:04:05 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' <= TIME '02:04:05 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' <= TIME '02:04:05'", BOOLEAN, false);
    }

    @Test
    public void testGreaterThan()
    {
        assertFunction("TIME '03:04:05.321 +07:09' > TIME '03:04:05.111 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' > TIME '02:04:05.111 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' > TIME '02:04:05.111'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' > TIME '03:04:05.321 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' > TIME '02:04:05.321 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' > TIME '02:04:05.321'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' > TIME '03:04:05.333 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' > TIME '02:04:05.333 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' > TIME '02:04:05.333'", BOOLEAN, false);
    }

    @Test
    public void testGreaterThanOrEqual()
    {
        assertFunction("TIME '03:04:05.321 +07:09' >= TIME '03:04:05.111 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' >= TIME '02:04:05.111 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' >= TIME '02:04:05.111'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' >= TIME '03:04:05.321 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' >= TIME '02:04:05.321 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' >= TIME '02:04:05.321'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' >= TIME '03:04:05.333 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' >= TIME '02:04:05.333 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' >= TIME '02:04:05.333'", BOOLEAN, false);
    }

    // Covers inclusive bounds, degenerate (equal) bounds, and reversed (empty) ranges.
    @Test
    public void testBetween()
    {
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '03:04:05.111 +07:09' and TIME '03:04:05.333 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.111 +06:09' and TIME '02:04:05.333 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.111' and TIME '02:04:05.333'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '03:04:05.321 +07:09' and TIME '03:04:05.333 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.321 +06:09' and TIME '02:04:05.333 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.321' and TIME '02:04:05.333'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '03:04:05.111 +07:09' and TIME '03:04:05.321 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.111 +06:09' and TIME '02:04:05.321 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.111' and TIME '02:04:05.321'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '03:04:05.321 +07:09' and TIME '03:04:05.321 +07:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.321 +06:09' and TIME '02:04:05.321 +06:09'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.321' and TIME '02:04:05.321'", BOOLEAN, true);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '03:04:05.322 +07:09' and TIME '03:04:05.333 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.322 +06:09' and TIME '02:04:05.333 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.322' and TIME '02:04:05.333'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '03:04:05.311 +07:09' and TIME '03:04:05.312 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.311 +06:09' and TIME '02:04:05.312 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.311' and TIME '02:04:05.312'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '03:04:05.333 +07:09' and TIME '03:04:05.111 +07:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.333 +06:09' and TIME '02:04:05.111 +06:09'", BOOLEAN, false);
        assertFunction("TIME '03:04:05.321 +07:09' between TIME '02:04:05.333' and TIME '02:04:05.111'", BOOLEAN, false);
    }

    // Cast to TIME drops the zone; the expected value carries the session's zone key.
    @Test
    public void testCastToTime()
    {
        assertFunction("cast(TIME '03:04:05.321 +07:09' as time)",
                TIME,
                new SqlTime(new DateTime(1970, 1, 1, 3, 4, 5, 321, WEIRD_ZONE).getMillis(), session.getTimeZoneKey()));
    }

    @Test
    public void testCastToTimestamp()
    {
        assertFunction("cast(TIME '03:04:05.321 +07:09' as timestamp)",
                TIMESTAMP,
                new SqlTimestamp(new DateTime(1970, 1, 1, 3, 4, 5, 321, WEIRD_ZONE).getMillis(), session.getTimeZoneKey()));
    }

    // Cast to TIMESTAMP WITH TIME ZONE preserves the literal's zone.
    @Test
    public void testCastToTimestampWithTimeZone()
    {
        assertFunction("cast(TIME '03:04:05.321 +07:09' as timestamp with time zone)",
                TIMESTAMP_WITH_TIME_ZONE,
                new SqlTimestampWithTimeZone(new DateTime(1970, 1, 1, 3, 4, 5, 321, WEIRD_ZONE).getMillis(), WEIRD_TIME_ZONE_KEY));
    }

    // Rendering always pads to millisecond precision and keeps the original zone.
    @Test
    public void testCastToSlice()
    {
        assertFunction("cast(TIME '03:04:05.321 +07:09' as varchar)", VARCHAR, "03:04:05.321 +07:09");
        assertFunction("cast(TIME '03:04:05 +07:09' as varchar)", VARCHAR, "03:04:05.000 +07:09");
        assertFunction("cast(TIME '03:04 +07:09' as varchar)", VARCHAR, "03:04:00.000 +07:09");
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.securityhub.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/securityhub-2018-10-26/GetFindings" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetFindingsRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The findings attributes used to define a condition to filter the findings returned.
     * </p>
     */
    private AwsSecurityFindingFilters filters;

    /**
     * <p>
     * Findings attributes used to sort the list of findings returned.
     * </p>
     */
    private java.util.List<SortCriterion> sortCriteria;

    /**
     * <p>
     * Paginates results. On your first call to the <code>GetFindings</code> operation, set the value of this parameter
     * to <code>NULL</code>. For subsequent calls to the operation, fill <code>nextToken</code> in the request with the
     * value of <code>nextToken</code> from the previous response to continue listing data.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * The maximum number of findings to return.
     * </p>
     */
    private Integer maxResults;

    /**
     * <p>
     * The findings attributes used to define a condition to filter the findings returned.
     * </p>
     *
     * @param filters
     *        The findings attributes used to define a condition to filter the findings returned.
     */
    public void setFilters(AwsSecurityFindingFilters filters) {
        this.filters = filters;
    }

    /**
     * <p>
     * The findings attributes used to define a condition to filter the findings returned.
     * </p>
     *
     * @return The findings attributes used to define a condition to filter the findings returned.
     */
    public AwsSecurityFindingFilters getFilters() {
        return this.filters;
    }

    /**
     * <p>
     * The findings attributes used to define a condition to filter the findings returned.
     * </p>
     *
     * @param filters
     *        The findings attributes used to define a condition to filter the findings returned.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetFindingsRequest withFilters(AwsSecurityFindingFilters filters) {
        setFilters(filters);
        return this;
    }

    /**
     * <p>
     * Findings attributes used to sort the list of findings returned.
     * </p>
     *
     * @return Findings attributes used to sort the list of findings returned.
     */
    public java.util.List<SortCriterion> getSortCriteria() {
        return sortCriteria;
    }

    /**
     * <p>
     * Findings attributes used to sort the list of findings returned.
     * </p>
     *
     * @param sortCriteria
     *        Findings attributes used to sort the list of findings returned.
     */
    public void setSortCriteria(java.util.Collection<SortCriterion> sortCriteria) {
        if (sortCriteria == null) {
            this.sortCriteria = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection cannot change this request.
        this.sortCriteria = new java.util.ArrayList<SortCriterion>(sortCriteria);
    }

    /**
     * <p>
     * Findings attributes used to sort the list of findings returned.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setSortCriteria(java.util.Collection)} or {@link #withSortCriteria(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param sortCriteria
     *        Findings attributes used to sort the list of findings returned.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetFindingsRequest withSortCriteria(SortCriterion... sortCriteria) {
        if (this.sortCriteria == null) {
            setSortCriteria(new java.util.ArrayList<SortCriterion>(sortCriteria.length));
        }
        for (SortCriterion ele : sortCriteria) {
            this.sortCriteria.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * Findings attributes used to sort the list of findings returned.
     * </p>
     *
     * @param sortCriteria
     *        Findings attributes used to sort the list of findings returned.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetFindingsRequest withSortCriteria(java.util.Collection<SortCriterion> sortCriteria) {
        setSortCriteria(sortCriteria);
        return this;
    }

    /**
     * <p>
     * Paginates results. On your first call to the <code>GetFindings</code> operation, set the value of this parameter
     * to <code>NULL</code>. For subsequent calls to the operation, fill <code>nextToken</code> in the request with the
     * value of <code>nextToken</code> from the previous response to continue listing data.
     * </p>
     *
     * @param nextToken
     *        Paginates results. On your first call to the <code>GetFindings</code> operation, set the value of this
     *        parameter to <code>NULL</code>. For subsequent calls to the operation, fill <code>nextToken</code> in the
     *        request with the value of <code>nextToken</code> from the previous response to continue listing data.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * Paginates results. On your first call to the <code>GetFindings</code> operation, set the value of this parameter
     * to <code>NULL</code>. For subsequent calls to the operation, fill <code>nextToken</code> in the request with the
     * value of <code>nextToken</code> from the previous response to continue listing data.
     * </p>
     *
     * @return Paginates results. On your first call to the <code>GetFindings</code> operation, set the value of this
     *         parameter to <code>NULL</code>. For subsequent calls to the operation, fill <code>nextToken</code> in the
     *         request with the value of <code>nextToken</code> from the previous response to continue listing data.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * Paginates results. On your first call to the <code>GetFindings</code> operation, set the value of this parameter
     * to <code>NULL</code>. For subsequent calls to the operation, fill <code>nextToken</code> in the request with the
     * value of <code>nextToken</code> from the previous response to continue listing data.
     * </p>
     *
     * @param nextToken
     *        Paginates results. On your first call to the <code>GetFindings</code> operation, set the value of this
     *        parameter to <code>NULL</code>. For subsequent calls to the operation, fill <code>nextToken</code> in the
     *        request with the value of <code>nextToken</code> from the previous response to continue listing data.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetFindingsRequest withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * <p>
     * The maximum number of findings to return.
     * </p>
     *
     * @param maxResults
     *        The maximum number of findings to return.
     */
    public void setMaxResults(Integer maxResults) {
        this.maxResults = maxResults;
    }

    /**
     * <p>
     * The maximum number of findings to return.
     * </p>
     *
     * @return The maximum number of findings to return.
     */
    public Integer getMaxResults() {
        return this.maxResults;
    }

    /**
     * <p>
     * The maximum number of findings to return.
     * </p>
     *
     * @param maxResults
     *        The maximum number of findings to return.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetFindingsRequest withMaxResults(Integer maxResults) {
        setMaxResults(maxResults);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getFilters() != null)
            sb.append("Filters: ").append(getFilters()).append(",");
        if (getSortCriteria() != null)
            sb.append("SortCriteria: ").append(getSortCriteria()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken()).append(",");
        if (getMaxResults() != null)
            sb.append("MaxResults: ").append(getMaxResults());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so this covers the former explicit null check too.
        if (!(obj instanceof GetFindingsRequest))
            return false;
        GetFindingsRequest other = (GetFindingsRequest) obj;
        // Objects.equals covers both the null-vs-null and null-vs-non-null cases that the
        // generated XOR-based checks expressed by hand.
        return java.util.Objects.equals(other.getFilters(), this.getFilters())
                && java.util.Objects.equals(other.getSortCriteria(), this.getSortCriteria())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken())
                && java.util.Objects.equals(other.getMaxResults(), this.getMaxResults());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seeded with 1, null -> 0) as the
        // previous hand-rolled implementation, so hash values are unchanged.
        return java.util.Objects.hash(getFilters(), getSortCriteria(), getNextToken(), getMaxResults());
    }

    @Override
    public GetFindingsRequest clone() {
        // Shallow clone, as provided by AmazonWebServiceRequest.
        return (GetFindingsRequest) super.clone();
    }
}
| |
/*
* Copyright (C) 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.gapid;
import android.app.IntentService;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.Base64;
import android.util.DisplayMetrics;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.ByteArrayOutputStream;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
/**
* An {@link IntentService} subclass for providing installed package information to GAPIS / GAPIC.
* <p/>
* When the service is sent the {@link #ACTION_SEND_PKG_INFO} action, the service will begin
* listening on the supplied local-abstract socket provided in the {@link #EXTRA_SOCKET_NAME} extra,
* or if the extra is absent, {@link #DEFAULT_SOCKET_NAME}. When an incoming connection to this
* socket is made, the service will send the installed package information on the accepted
* connection, then close the accepted connection and the listening socket.
*/
public class PackageInfoService extends IntentService {
    private static final String TAG = "gapid-pkginfo";

    /** Density at which icons are rasterized before applying the requested scale. */
    private static final int BASE_ICON_DENSITY = DisplayMetrics.DENSITY_MEDIUM;

    /**
     * Action used to start waiting for an incoming connection on the local-abstract port
     * {@link #EXTRA_SOCKET_NAME}. When a connection is made, the package information is send to the
     * connected socket, the socket is closed and the service stops listening on
     * {@link #EXTRA_SOCKET_NAME}.
     */
    private static final String ACTION_SEND_PKG_INFO = "com.google.android.gapid.action.SEND_PKG_INFO";

    /**
     * Optional parameter for {@link #ACTION_SEND_PKG_INFO} that changes the local-abstract port
     * used to listen for incoming connections. The default value is {@link #DEFAULT_SOCKET_NAME}.
     */
    private static final String EXTRA_SOCKET_NAME = "com.google.android.gapid.extra.SOCKET_NAME";

    /**
     * Optional parameter for {@link #ACTION_SEND_PKG_INFO} that makes the service include icons.
     */
    private static final String EXTRA_INCLUDE_ICONS = "com.google.android.gapid.extra.INCLUDE_ICONS";

    /**
     * Optional parameter for {@link #ACTION_SEND_PKG_INFO} that scales the icon density.
     */
    private static final String EXTRA_ICON_DENSITY_SCALE = "com.google.android.gapid.extra.ICON_DENSITY_SCALE";

    /**
     * Optional parameter for {@link #ACTION_SEND_PKG_INFO} that makes the service only report
     * debuggable packages, for use on production Android builds.
     */
    private static final String EXTRA_ONLY_DEBUG = "com.google.android.gapid.extra.ONLY_DEBUG";

    /**
     * The default socket name when {@link #EXTRA_SOCKET_NAME} is not provided.
     */
    private static final String DEFAULT_SOCKET_NAME = "gapid-pkginfo";

    public PackageInfoService() {
        super("PackageInfoService");
    }

    @Override
    protected void onHandleIntent(Intent intent) {
        if (intent != null) {
            final String action = intent.getAction();
            if (ACTION_SEND_PKG_INFO.equals(action)) {
                String socketName = intent.getStringExtra(EXTRA_SOCKET_NAME);
                if (socketName == null) {
                    socketName = DEFAULT_SOCKET_NAME;
                }
                boolean onlyDebug = intent.getBooleanExtra(EXTRA_ONLY_DEBUG, false);
                boolean includeIcons = intent.getBooleanExtra(EXTRA_INCLUDE_ICONS, false);
                float iconDensityScale = intent.getFloatExtra(EXTRA_ICON_DENSITY_SCALE, 1.0f);
                handleSendPackageInfo(socketName, onlyDebug, includeIcons, iconDensityScale);
            }
        }
    }

    /**
     * Handler for the {@link #ACTION_SEND_PKG_INFO} intent.
     *
     * Queries the installed packages on a background thread while the JSON payload is prepared,
     * then writes the payload to the first client that connects to {@code socketName}.
     */
    private void handleSendPackageInfo(
            final String socketName,
            final boolean onlyDebug,
            final boolean includeIcons,
            final float iconDensityScale) {
        final ExecutorService executor = Executors.newCachedThreadPool();
        final IconStore icons = new IconStore((int)(BASE_ICON_DENSITY * iconDensityScale));
        final PackageManager pm = getPackageManager();
        // Kick off the (potentially slow) package query concurrently with socket setup.
        final Future<List<PackageInfo>> installedPackagesFuture = executor.submit(
                new Callable<List<PackageInfo>>() {
                    @Override
                    public List<PackageInfo> call() throws Exception {
                        List<PackageInfo> packages = pm.getInstalledPackages(
                                PackageManager.GET_ACTIVITIES | PackageManager.GET_SIGNATURES);
                        return packages;
                    }
                });
        Callable<byte[]> packageInfoFuture = new Callable<byte[]>() {
            @Override
            public byte[] call() throws Exception {
                String json = getPackageInfo(includeIcons ? icons : null, onlyDebug, pm, installedPackagesFuture.get());
                return json.getBytes("UTF-8");
            }
        };
        try {
            SocketWriter.connectAndWrite(socketName, executor.submit(packageInfoFuture));
        } catch (Exception ex) {
            Log.e(TAG, "Error occurred", ex);
        } finally {
            executor.shutdown();
        }
    }

    /**
     * Builds the JSON document describing {@code packages}.
     *
     * @param icons icon store to populate, or {@code null} to omit icons.
     * @param onlyDebug when {@code true}, non-debuggable packages are skipped.
     */
    private String getPackageInfo(
            IconStore icons,
            boolean onlyDebug,
            PackageManager pm,
            List<PackageInfo> packages) throws JSONException {
        // The ApplicationInfo.primaryCpuAbi field is hidden. Use reflection to get at it.
        Field primaryCpuAbiField = null;
        try {
            primaryCpuAbiField = ApplicationInfo.class.getField("primaryCpuAbi");
        } catch (NoSuchFieldException e) {
            Log.w(TAG, "Unable to find 'primaryCpuAbi' ApplicationInfo hidden field");
        }
        JSONArray packagesJson = new JSONArray();
        for (PackageInfo packageInfo : packages) {
            ApplicationInfo applicationInfo = packageInfo.applicationInfo;
            boolean isDebuggable =
                    applicationInfo != null &&
                    (applicationInfo.flags & ApplicationInfo.FLAG_DEBUGGABLE) > 0;
            if (!isDebuggable && onlyDebug) {
                continue;
            }
            JSONObject packageJson = getPackageJson(
                    pm, packageInfo, icons, primaryCpuAbiField, isDebuggable);
            packagesJson.put(packageJson);
        }
        JSONObject root = new JSONObject();
        root.put("packages", packagesJson);
        // Always emit an "icons" array so consumers can index into it unconditionally.
        root.put("icons", icons != null ? icons.json() : new JSONArray());
        return root.toString();
    }

    /**
     * Builds the JSON object describing a single package: name, debuggability, ABI, icon index
     * and the package's activities with their intent-filter actions.
     */
    private JSONObject getPackageJson(
            PackageManager pm,
            PackageInfo packageInfo,
            IconStore icons,
            Field primaryCpuAbiField,
            boolean isDebuggable) throws JSONException {
        ApplicationInfo applicationInfo = packageInfo.applicationInfo;
        Intent launchIntent = pm.getLaunchIntentForPackage(packageInfo.packageName);
        ActivityInfo launchActivityInfo = null;
        if (launchIntent != null) {
            launchActivityInfo = launchIntent.resolveActivityInfo(pm, 0);
        }
        Resources resources = null;
        try {
            resources = pm.getResourcesForApplication(applicationInfo);
        } catch (PackageManager.NameNotFoundException ex) {
            // Best effort: icons for this package are simply skipped (add() tolerates null).
        }
        // Group intent filters by activity name so each activity's actions can be listed below.
        Map<String, List<IntentFilter>> activityIntents = new HashMap<String, List<IntentFilter>>();
        Intent queryIntent = new Intent();
        queryIntent.setPackage(packageInfo.packageName);
        List<ResolveInfo> resolveInfos = pm.queryIntentActivities(queryIntent, PackageManager.GET_RESOLVED_FILTER);
        for (ResolveInfo resolveInfo : resolveInfos) {
            IntentFilter intent = resolveInfo.filter;
            if (intent == null) {
                continue;
            }
            List<IntentFilter> intents = activityIntents.get(resolveInfo.activityInfo.name);
            if (intents == null) {
                intents = new ArrayList<IntentFilter>();
                activityIntents.put(resolveInfo.activityInfo.name, intents);
            }
            intents.add(intent);
        }
        JSONArray activitiesJson = new JSONArray();
        if (packageInfo.activities != null) {
            for (ActivityInfo activityInfo : packageInfo.activities) {
                int iconIndex = -1;
                if (icons != null) {
                    iconIndex = icons.add(resources, activityInfo.icon);
                }
                boolean isLaunchActivity = (launchActivityInfo != null) ?
                        launchActivityInfo.name.equals(activityInfo.name) : false;
                JSONArray actionsJson = new JSONArray();
                List<IntentFilter> intents = activityIntents.get(activityInfo.name);
                if (intents != null) {
                    for (IntentFilter intent : intents) {
                        for (int i = 0; i < intent.countActions(); i++) {
                            String action = intent.getAction(i);
                            JSONObject actionJson = new JSONObject();
                            actionJson.put("name", action);
                            if (isLaunchActivity) {
                                actionJson.put("isLaunch", action.equals(launchIntent.getAction()));
                            }
                            actionsJson.put(actionJson);
                        }
                    }
                }
                JSONObject activityJson = new JSONObject();
                activityJson.put("name", activityInfo.name);
                activityJson.put("icon", iconIndex);
                activityJson.put("actions", actionsJson);
                activitiesJson.put(activityJson);
            }
        }
        int iconIndex = -1;
        String primaryCpuAbi = null;
        if (applicationInfo != null) {
            if (icons != null) {
                iconIndex = icons.add(resources, applicationInfo.icon);
            }
            if (primaryCpuAbiField != null) {
                try {
                    primaryCpuAbi = (String) primaryCpuAbiField.get(applicationInfo);
                } catch (Exception e) {
                    Log.w(TAG, "Exception thrown accessing 'primaryCpuAbi': " + e.getMessage());
                }
            }
        }
        JSONObject packageJson = new JSONObject();
        packageJson.put("name", packageInfo.packageName);
        packageJson.put("debuggable", isDebuggable);
        packageJson.put("icon", iconIndex);
        if (primaryCpuAbi != null) {
            packageJson.put("abi", primaryCpuAbi);
        }
        packageJson.put("activities", activitiesJson);
        return packageJson;
    }

    /**
     * IconStore stores all {@link Drawable}s as PNG, base-64 encoded images.
     * Duplicates are only stored once.
     *
     * Static nested class: it never touches the enclosing service instance, so it does not
     * need (and previously silently held) a reference to it.
     */
    private static class IconStore {
        // Maps PNG-base64 payload -> index in mJson, for content-level deduplication.
        private final Map<String, Integer> mMap = new HashMap<String, Integer>();
        // Identity map short-circuits re-encoding when the exact same Bitmap is seen again.
        private final IdentityHashMap<Bitmap, Integer> mBitmapMap = new IdentityHashMap<Bitmap, Integer>();
        private final JSONArray mJson = new JSONArray();
        private final int iconDensity;

        IconStore(int iconDensity) {
            this.iconDensity = iconDensity;
        }

        /**
         * add adds the specified drawable to the store.
         *
         * @return The index of the image stored in the {@link JSONArray} returned by {@link #json},
         *         or -1 if the icon is absent or cannot be rendered as a bitmap.
         */
        public int add(Resources resources, int iconId) {
            if (resources == null || iconId <= 0) {
                return -1;
            }
            Drawable drawable = null;
            try {
                drawable = resources.getDrawableForDensity(iconId, iconDensity);
            } catch (Resources.NotFoundException ex) {
                return -1;
            }
            // instanceof is false for null, so the former explicit null check was redundant.
            if (!(drawable instanceof BitmapDrawable)) {
                return -1;
            }
            Bitmap bitmap = ((BitmapDrawable) drawable).getBitmap();
            if (mBitmapMap.containsKey(bitmap)) {
                return mBitmapMap.get(bitmap);
            }
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            bitmap.compress(Bitmap.CompressFormat.PNG, 100, stream);
            byte[] pngBytes = stream.toByteArray();
            String pngBase64 = Base64.encodeToString(pngBytes, Base64.NO_WRAP);
            if (!mMap.containsKey(pngBase64)) {
                int index = mJson.length();
                mMap.put(pngBase64, index);
                mBitmapMap.put(bitmap, index);
                mJson.put(pngBase64);
                return index;
            } else {
                return mMap.get(pngBase64);
            }
        }

        /**
         * @return The {@link JSONArray} object holding all the base-64, PNG encoded images.
         */
        public JSONArray json() {
            return mJson;
        }
    }
}
| |
package com.sequenceiq.cloudbreak.cloud.azure;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.ui.freemarker.FreeMarkerConfigurationFactoryBean;
import com.sequenceiq.cloudbreak.cloud.context.CloudContext;
import com.sequenceiq.cloudbreak.cloud.model.DatabaseServer;
import com.sequenceiq.cloudbreak.cloud.model.DatabaseStack;
import com.sequenceiq.cloudbreak.cloud.model.Location;
import com.sequenceiq.cloudbreak.cloud.model.Network;
import com.sequenceiq.cloudbreak.cloud.model.Region;
import com.sequenceiq.cloudbreak.cloud.model.Subnet;
import com.sequenceiq.cloudbreak.common.json.JsonUtil;
import com.sequenceiq.cloudbreak.util.FreeMarkerTemplateUtils;
import com.sequenceiq.common.api.type.OutboundInternetTraffic;
import freemarker.template.Template;
@ExtendWith(MockitoExtension.class)
public class AzureTemplateBuilderDbTest {
    private static final String LATEST_ARM_DB_TEMPLATE_PATH = "templates/arm-dbstack.ftl";

    private static final Long STACK_ID = 1234L;

    private static final String STACK_NAME = "myStack";

    private static final String STACK_CRN = "crn";

    private static final String PLATFORM = "Azure";

    private static final String VARIANT = "";

    private static final String REGION = "westus2";

    private static final String ACCOUNT_ID = UUID.randomUUID().toString();

    private static final String SUBNET_CIDR = "0.0.0.0/0";

    private static final String RESOURCE_GROUP = "rg";

    private static final String SUBNET_ID = "subnet-lkewflerwkj";

    private static final String FULL_SUBNET_ID = RESOURCE_GROUP + '/' + SUBNET_ID;

    private static final String NETWORK_CIDR = "127.0.0.1/32";

    private static final String SERVER_ID = "myServer";

    private static final String ROOT_USER_NAME = "boss";

    private static final String ROOT_PASSWORD = "godmode";

    private static final String KEY_URL = "keyVaultUrl";

    private static final String KEY_VAULT_RESOURCE_GROUP_NAME = "keyVaultResourceGroupName";

    @Spy
    private FreeMarkerTemplateUtils freeMarkerTemplateUtils;

    @InjectMocks
    private AzureDatabaseTemplateBuilder underTest;

    @Mock
    private AzureDatabaseTemplateProvider azureDatabaseTemplateProvider;

    @Mock
    private AzureUtils azureUtils;

    private CloudContext cloudContext;

    private FreeMarkerConfigurationFactoryBean factoryBean;

    static Iterable<?> templatesPathDataProvider() {
        return List.of(LATEST_ARM_DB_TEMPLATE_PATH);
    }

    @BeforeEach
    void setUp() throws Exception {
        factoryBean = new FreeMarkerConfigurationFactoryBean();
        factoryBean.setPreferFileSystemAccess(false);
        factoryBean.setTemplateLoaderPath("classpath:/");
        factoryBean.afterPropertiesSet();
        cloudContext = CloudContext.Builder.builder()
                .withId(STACK_ID)
                .withName(STACK_NAME)
                .withCrn(STACK_CRN)
                .withPlatform(PLATFORM)
                .withVariant(VARIANT)
                .withLocation(Location.location(Region.region(REGION)))
                .withAccountId(ACCOUNT_ID)
                .build();
    }

    @ParameterizedTest(name = "{0}")
    @MethodSource("templatesPathDataProvider")
    void buildTestWhenUseSslEnforcementFalse(String templatePath) {
        buildTestWhenUseSslEnforcementInternal(templatePath, false);
    }

    @ParameterizedTest(name = "{0}")
    @MethodSource("templatesPathDataProvider")
    void buildTestWhenUseSslEnforcementTrue(String templatePath) {
        buildTestWhenUseSslEnforcementInternal(templatePath, true);
    }

    @Test
    void buildTestWhenDataEncryptionParametersPresent() {
        Template template = loadTemplate(LATEST_ARM_DB_TEMPLATE_PATH);
        // Full key URL (vault + key + version): all four encryption parameters must be rendered.
        DatabaseStack databaseStack = createEncryptedDatabaseStack(
                "https://dummyVault.vault.azure.net/keys/dummyKey/dummyVersion", template.toString());
        Mockito.when(azureDatabaseTemplateProvider.getTemplate(databaseStack)).thenReturn(template);
        Mockito.when(azureUtils.encodeString(SUBNET_ID)).thenReturn("hash");
        String result = underTest.build(cloudContext, databaseStack);
        assertThat(JsonUtil.isValid(result)).overridingErrorMessage("Invalid JSON: " + result).isTrue();
        assertThat(result).contains("\"keyVaultName\": {\n" +
                "      \"type\": \"string\",\n" +
                "      \"defaultValue\" : \"dummyVault\",\n" +
                "      \"metadata\": {\n" +
                "        \"description\": \"Key vault name where the key to use is stored\"\n" +
                "      }\n" +
                "    },\n" +
                "    \"keyVaultResourceGroupName\": {\n" +
                "      \"type\": \"string\",\n" +
                "      \"defaultValue\" : \"dummyResourceGroup\",\n" +
                "      \"metadata\": {\n" +
                "        \"description\": \"Key vault resource group name where it is stored\"\n" +
                "      }\n" +
                "    },\n" +
                "    \"keyName\": {\n" +
                "      \"type\": \"string\",\n" +
                "      \"defaultValue\" : \"dummyKey\",\n" +
                "      \"metadata\": {\n" +
                "        \"description\": \"Key name in the key vault to use as encryption protector\"\n" +
                "      }\n" +
                "    },\n" +
                "    \"keyVersion\": {\n" +
                "      \"type\": \"string\",\n" +
                "      \"defaultValue\" : \"dummyVersion\",\n" +
                "      \"metadata\": {\n" +
                "        \"description\": \"Version of the key in the key vault to use as encryption protector\"\n" +
                "      }\n" +
                "    }");
    }

    @Test
    void buildTestWhenDataEncryptionParametersPresentAndKeyVersionError() {
        Template template = loadTemplate(LATEST_ARM_DB_TEMPLATE_PATH);
        // Key URL lacking the version segment must be rejected before any template rendering.
        DatabaseStack databaseStack = createEncryptedDatabaseStack(
                "https://dummyVault.vault.azure.net/keys/dummyKey", template.toString());
        assertThrows(IllegalArgumentException.class, () -> underTest.build(cloudContext, databaseStack));
    }

    private void buildTestWhenUseSslEnforcementInternal(String templatePath, boolean useSslEnforcement) {
        Template template = loadTemplate(templatePath);
        DatabaseStack databaseStack = createDatabaseStack(useSslEnforcement, template.toString());
        Mockito.when(azureDatabaseTemplateProvider.getTemplate(databaseStack)).thenReturn(template);
        Mockito.when(azureUtils.encodeString(SUBNET_ID)).thenReturn("hash");
        String result = underTest.build(cloudContext, databaseStack);
        assertThat(JsonUtil.isValid(result)).overridingErrorMessage("Invalid JSON: " + result).isTrue();
        assertThat(result).contains(
                "    \"useSslEnforcement\": {\n" +
                "      \"type\": \"bool\",\n" +
                "      \"defaultValue\": " + useSslEnforcement + ",");
        assertThat(result).contains(
                "    \"privateEndpointName\": {\n" +
                "            \"defaultValue\": \"pe-hash-to-myServer\",\n" +
                "            \"type\": \"String\"\n" +
                "        }"
        );
    }

    /** Loads the FreeMarker template from the classpath, wrapping checked I/O failures. */
    private Template loadTemplate(String templatePath) {
        return Optional.ofNullable(factoryBean.getObject())
                .map(config -> {
                    try {
                        return config.getTemplate(templatePath, "UTF-8");
                    } catch (IOException e) {
                        throw new IllegalStateException(e);
                    }
                }).orElseThrow();
    }

    /** Builds a DatabaseStack whose server carries disk-encryption parameters for {@code keyUrl}. */
    private DatabaseStack createEncryptedDatabaseStack(String keyUrl, String template) {
        Subnet subnet = new Subnet(SUBNET_CIDR);
        Network network = new Network(subnet, List.of(NETWORK_CIDR), OutboundInternetTraffic.ENABLED);
        network.putParameter("subnets", FULL_SUBNET_ID);
        Map<String, Object> params = new HashMap<>();
        params.put("dbVersion", "10");
        params.put(KEY_URL, keyUrl);
        params.put(KEY_VAULT_RESOURCE_GROUP_NAME, "dummyResourceGroup");
        DatabaseServer databaseServer = DatabaseServer.builder()
                .serverId(SERVER_ID)
                .rootUserName(ROOT_USER_NAME)
                .rootPassword(ROOT_PASSWORD)
                .location(REGION)
                .params(params)
                .build();
        return new DatabaseStack(network, databaseServer, Collections.emptyMap(), template);
    }

    /** Builds a plain DatabaseStack with the given SSL-enforcement flag. */
    private DatabaseStack createDatabaseStack(boolean useSslEnforcement, String template) {
        Subnet subnet = new Subnet(SUBNET_CIDR);
        Network network = new Network(subnet, List.of(NETWORK_CIDR), OutboundInternetTraffic.ENABLED);
        network.putParameter("subnets", FULL_SUBNET_ID);
        DatabaseServer databaseServer = DatabaseServer.builder()
                .useSslEnforcement(useSslEnforcement)
                .serverId(SERVER_ID)
                .rootUserName(ROOT_USER_NAME)
                .rootPassword(ROOT_PASSWORD)
                .location(REGION)
                .params(Map.of("dbVersion", "10"))
                .build();
        return new DatabaseStack(network, databaseServer, Collections.emptyMap(), template);
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer.extractor;
import com.google.android.exoplayer.C;
import com.google.android.exoplayer.upstream.DataSource;
import java.io.EOFException;
import java.io.IOException;
import java.util.Arrays;
/**
* An {@link ExtractorInput} that wraps a {@link DataSource}.
*/
public final class DefaultExtractorInput implements ExtractorInput {
private static final byte[] SCRATCH_SPACE = new byte[4096];
private final DataSource dataSource;
private final long streamLength;
private long position;
private byte[] peekBuffer;
private int peekBufferPosition;
private int peekBufferLength;
/**
* Creates an input that reads from {@code dataSource}, starting at {@code position}.
*
* @param dataSource The wrapped {@link DataSource}.
* @param position The initial position in the stream.
* @param length The length of the stream, or {@link C#LENGTH_UNBOUNDED} if it is unknown.
*/
public DefaultExtractorInput(DataSource dataSource, long position, long length) {
this.dataSource = dataSource;
this.position = position;
this.streamLength = length;
// Initial peek-ahead capacity; presumably grown on demand by ensureSpaceForPeek — confirm.
peekBuffer = new byte[8 * 1024];
}
@Override
public int read(byte[] target, int offset, int length) throws IOException, InterruptedException {
    if (Thread.interrupted()) {
        throw new InterruptedException();
    }
    // Serve as much as possible from previously peeked data first.
    int peekBytes = Math.min(peekBufferLength, length);
    System.arraycopy(peekBuffer, 0, target, offset, peekBytes);
    offset += peekBytes;
    length -= peekBytes;
    int bytesRead = length != 0 ? dataSource.read(target, offset, length) : 0;
    if (bytesRead == C.RESULT_END_OF_INPUT) {
        if (peekBytes == 0) {
            return C.RESULT_END_OF_INPUT;
        }
        // Bug fix: bytes already copied from the peek buffer must be reported to the
        // caller (and consumed below) rather than discarded by returning end-of-input.
        bytesRead = 0;
    }
    updatePeekBuffer(peekBytes);
    bytesRead += peekBytes;
    position += bytesRead;
    return bytesRead;
}
@Override
public boolean readFully(byte[] target, int offset, int length, boolean allowEndOfInput)
throws IOException, InterruptedException {
// Serve as much as possible from previously peeked data first.
int peekBytes = Math.min(peekBufferLength, length);
System.arraycopy(peekBuffer, 0, target, offset, peekBytes);
offset += peekBytes;
int remaining = length - peekBytes;
// Loop until the source has supplied the rest; a single read may return fewer bytes.
while (remaining > 0) {
if (Thread.interrupted()) {
throw new InterruptedException();
}
int bytesRead = dataSource.read(target, offset, remaining);
if (bytesRead == C.RESULT_END_OF_INPUT) {
// Only a "clean" end of input — no bytes read at all — may be reported by
// returning false; a partial read at EOF is always an error.
if (allowEndOfInput && remaining == length) {
return false;
}
throw new EOFException();
}
offset += bytesRead;
remaining -= bytesRead;
}
// Consume the served bytes from the peek buffer and advance the read position.
updatePeekBuffer(peekBytes);
position += length;
return true;
}
@Override
public void readFully(byte[] target, int offset, int length)
throws IOException, InterruptedException {
// Convenience overload: end of input is never tolerated, so EOFException propagates.
readFully(target, offset, length, false);
}
@Override
public void skipFully(int length) throws IOException, InterruptedException {
// Bytes already peeked count toward the skip; only the remainder touches the source.
int peekBytes = Math.min(peekBufferLength, length);
int remaining = length - peekBytes;
while (remaining > 0) {
if (Thread.interrupted()) {
throw new InterruptedException();
}
// Discard source bytes by reading them into the shared scratch buffer.
// NOTE(review): SCRATCH_SPACE is static and shared across instances; its contents
// are never read, so concurrent writes appear harmless — confirm intended.
int bytesRead = dataSource.read(SCRATCH_SPACE, 0, Math.min(SCRATCH_SPACE.length, remaining));
if (bytesRead == C.RESULT_END_OF_INPUT) {
throw new EOFException();
}
remaining -= bytesRead;
}
// Consume the skipped bytes from the peek buffer and advance the read position.
updatePeekBuffer(peekBytes);
position += length;
}
/**
 * Copies exactly {@code length} bytes into {@code target} starting from the current peek
 * position, fetching more data from the source into the peek buffer as needed. Advances the
 * peek position but — unlike read() — not the read position.
 */
@Override
public void peekFully(byte[] target, int offset, int length)
    throws IOException, InterruptedException {
  ensureSpaceForPeek(length);
  // Bytes already buffered beyond the current peek position can be served directly.
  int peekBytes = Math.min(peekBufferLength - peekBufferPosition, length);
  System.arraycopy(peekBuffer, peekBufferPosition, target, offset, peekBytes);
  offset += peekBytes;
  // The remainder must be fetched from the source and appended to the peek buffer.
  int fillBytes = length - peekBytes;
  int remaining = fillBytes;
  int writePosition = peekBufferLength;
  while (remaining > 0) {
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    int bytesRead = dataSource.read(peekBuffer, writePosition, remaining);
    if (bytesRead == C.RESULT_END_OF_INPUT) {
      throw new EOFException();
    }
    // Newly buffered bytes are mirrored into the caller's target as they arrive.
    System.arraycopy(peekBuffer, writePosition, target, offset, bytesRead);
    remaining -= bytesRead;
    writePosition += bytesRead;
    offset += bytesRead;
  }
  // Advance the peek position past everything returned; the recorded buffer length grows
  // only by the newly fetched bytes.
  peekBufferPosition += length;
  peekBufferLength += fillBytes;
}
/**
 * Advances the peek position by exactly {@code length} bytes, fetching data from the source
 * into the peek buffer where the requested range is not already buffered.
 *
 * @param length The number of bytes to advance the peek position by.
 * @throws EOFException If the stream ends before the peek position can be advanced.
 * @throws IOException If an error occurs reading from the source.
 * @throws InterruptedException If the calling thread is interrupted.
 */
@Override
public void advancePeekPosition(int length) throws IOException, InterruptedException {
  ensureSpaceForPeek(length);
  int alreadyPeeked = Math.min(peekBufferLength - peekBufferPosition, length);
  // Fill the buffer from its current end up to `end` with fresh source data.
  int dest = peekBufferLength;
  int end = dest + (length - alreadyPeeked);
  while (dest < end) {
    if (Thread.interrupted()) {
      throw new InterruptedException();
    }
    int count = dataSource.read(peekBuffer, dest, end - dest);
    if (count == C.RESULT_END_OF_INPUT) {
      throw new EOFException();
    }
    dest += count;
  }
  peekBufferPosition += length;
  peekBufferLength = end;
}
/** Rewinds the peek position to the current read position; buffered data is retained. */
@Override
public void resetPeekPosition() {
  peekBufferPosition = 0;
}
/** Returns the current read position (peeking does not move it). */
@Override
public long getPosition() {
  return position;
}
/** Returns the stream length supplied at construction, or C#LENGTH_UNBOUNDED if unknown. */
@Override
public long getLength() {
  return streamLength;
}
/**
 * Ensures {@code peekBuffer} is large enough to store at least {@code length} bytes from the
 * current peek position.
 */
private void ensureSpaceForPeek(int length) {
  int required = peekBufferPosition + length;
  if (peekBuffer.length < required) {
    // At least double the capacity so repeated growth stays amortized-linear.
    peekBuffer = Arrays.copyOf(peekBuffer, Math.max(2 * peekBuffer.length, required));
  }
}
/**
 * Updates the peek buffer's length, position and contents after consuming data.
 *
 * @param bytesConsumed The number of bytes consumed from the peek buffer.
 */
private void updatePeekBuffer(int bytesConsumed) {
  int newLength = peekBufferLength - bytesConsumed;
  // Shift the unconsumed tail to the front of the buffer, then reset the peek cursor.
  System.arraycopy(peekBuffer, bytesConsumed, peekBuffer, 0, newLength);
  peekBufferLength = newLength;
  peekBufferPosition = 0;
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.app.service.runtime;
import java.io.InputStream;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import org.activiti.app.domain.runtime.RelatedContent;
import org.activiti.app.repository.runtime.RelatedContentRepository;
import org.activiti.content.storage.api.ContentObject;
import org.activiti.content.storage.api.ContentStorage;
import org.activiti.engine.identity.User;
import org.activiti.engine.runtime.Clock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionSynchronizationAdapter;
import org.springframework.transaction.support.TransactionSynchronizationManager;
/**
 * Service for managing {@link RelatedContent} entities and their binary payloads stored in the
 * configured {@link ContentStorage}. Raw content deletion is deferred to after-commit
 * synchronizations so a transaction rollback never leaves entities pointing at deleted blobs.
 *
 * @author Frederik Heremans
 */
@Service
public class RelatedContentService {

    /** Page size used when iterating over all content of a process instance. */
    private static final int RELATED_CONTENT_INTERNAL_BATCH_SIZE = 256;

    @Autowired
    protected RelatedContentRepository contentRepository;

    @Autowired
    protected ContentStorage contentStorage;

    @Autowired
    protected Clock clock;

    /** Returns a page of content related to the given external source/sourceId pair. */
    public Page<RelatedContent> getRelatedContent(String source, String sourceId, int pageSize, int page) {
        PageRequest paging = new PageRequest(page, pageSize);
        return contentRepository.findAllRelatedBySourceAndSourceId(source, sourceId, paging);
    }

    /** Returns a page of related (non-field) content attached to a task. */
    public Page<RelatedContent> getRelatedContentForTask(String taskId, int pageSize, int page) {
        PageRequest paging = new PageRequest(page, pageSize);
        return contentRepository.findAllRelatedByTaskId(taskId, paging);
    }

    /** Returns a page of related (non-field) content attached to a process instance. */
    public Page<RelatedContent> getRelatedContentForProcessInstance(String processInstanceId, int pageSize, int page) {
        PageRequest paging = new PageRequest(page, pageSize);
        return contentRepository.findAllRelatedByProcessInstanceId(processInstanceId, paging);
    }

    /** Returns a page of content selected in one specific form field of a process instance. */
    public Page<RelatedContent> getFieldContentForProcessInstance(String processInstanceId, String field, int pageSize, int page) {
        PageRequest paging = new PageRequest(page, pageSize);
        return contentRepository.findAllByProcessInstanceIdAndField(processInstanceId, field, paging);
    }

    /** Returns a page of all field-based content attached to a task. */
    public Page<RelatedContent> getFieldContentForTask(String taskId, int pageSize, int page) {
        PageRequest paging = new PageRequest(page, pageSize);
        return contentRepository.findAllFieldBasedContentByTaskId(taskId, paging);
    }

    /** Returns a page of all field-based content attached to a process instance. */
    public Page<RelatedContent> getAllFieldContentForProcessInstance(String processInstanceId, int pageSize, int page) {
        PageRequest paging = new PageRequest(page, pageSize);
        return contentRepository.findAllFieldBasedContentByProcessInstanceId(processInstanceId, paging);
    }

    /** Returns a page of content selected in one specific form field of a task. */
    public Page<RelatedContent> getAllFieldContentForTask(String taskId, String field, int pageSize, int page) {
        PageRequest paging = new PageRequest(page, pageSize);
        return contentRepository.findAllByTaskIdAndField(taskId, field, paging);
    }

    /**
     * Creates field-bound content (relatedContent=false, link=false) for the given form field.
     *
     * @param data stream to persist in the content store, may be null for metadata-only entries
     * @param lengthHint expected stream length, forwarded to the content store
     */
    @Transactional
    public RelatedContent createRelatedContent(User user, String name, String source, String sourceId, String taskId,
            String processId, String field, String mimeType, InputStream data, Long lengthHint) {
        return createRelatedContent(user, name, source, sourceId, taskId, processId,
                mimeType, data, lengthHint, false, false, field);
    }

    /** Creates content that is not bound to a form field (related content or a link). */
    @Transactional
    public RelatedContent createRelatedContent(User user, String name, String source, String sourceId, String taskId,
            String processId, String mimeType, InputStream data, Long lengthHint, boolean relatedContent, boolean link) {
        return createRelatedContent(user, name, source, sourceId, taskId, processId, mimeType, data, lengthHint, relatedContent, link, null);
    }

    /**
     * Common creation path: builds the entity, optionally stores the stream in the content
     * store, and saves the entity.
     */
    protected RelatedContent createRelatedContent(User user, String name, String source, String sourceId, String taskId,
            String processId, String mimeType, InputStream data, Long lengthHint, boolean relatedContent, boolean link, String field) {
        Date timestamp = clock.getCurrentTime();
        final RelatedContent newContent = new RelatedContent();
        newContent.setName(name);
        newContent.setSource(source);
        newContent.setSourceId(sourceId);
        newContent.setTaskId(taskId);
        newContent.setProcessInstanceId(processId);
        newContent.setCreatedBy(user.getId());
        newContent.setCreated(timestamp);
        newContent.setLastModifiedBy(user.getId());
        newContent.setLastModified(timestamp);
        newContent.setMimeType(mimeType);
        newContent.setRelatedContent(relatedContent);
        newContent.setLink(link);
        newContent.setField(field);
        if (data != null) {
            // Stream given, write to store and save a reference to the content object
            ContentObject createContentObject = contentStorage.createContentObject(data, lengthHint);
            newContent.setContentStoreId(createContentObject.getId());
            newContent.setContentAvailable(true);
            // After storing the stream, store the length to be accessible without having to consult the
            // underlying content storage to get file size
            newContent.setContentSize(createContentObject.getContentLength());
        } else {
            if (link) {
                // Mark content as available, since it will never be fetched and copied
                newContent.setContentAvailable(true);
            } else {
                // Content not (yet) available
                newContent.setContentAvailable(false);
            }
        }
        contentRepository.save(newContent);
        return newContent;
    }

    /**
     * Fetches a single content entity by id.
     *
     * @param includeOwner when true, lazily loaded checkout/lock owners are touched so they are
     *        initialized before the persistence context closes
     */
    public RelatedContent getRelatedContent(Long id, boolean includeOwner) {
        RelatedContent content = contentRepository.findOne(id);
        if (content != null && includeOwner) {
            // Touch related entities
            content.getCheckoutOwner();
            content.getLockOwner();
        }
        return content;
    }

    /**
     * Deletes the entity; the raw blob is removed from the content store only after the
     * surrounding transaction commits, so a rollback never orphans the entity.
     */
    @Transactional
    public void deleteRelatedContent(RelatedContent content) {
        if (content.getContentStoreId() != null) {
            final String storeId = content.getContentStoreId();
            TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() {
                @Override
                public void afterCommit() {
                    contentStorage.deleteContentObject(storeId);
                }
            });
        }
        contentRepository.delete(content);
    }

    /**
     * Locks the content for the given user.
     *
     * @param timeOut lock lifetime in seconds, used to compute the expiration date
     * @return always true (failure modes surface as exceptions)
     */
    @Transactional
    public boolean lockContent(RelatedContent content, int timeOut, User user) {
        content.setLockDate(clock.getCurrentTime());
        content.setLocked(true);
        content.setLockOwner(user.getId());
        // Set expiration date based on timeout
        Calendar expiration = Calendar.getInstance();
        expiration.setTime(content.getLockDate());
        expiration.add(Calendar.SECOND, timeOut);
        content.setLockExpirationDate(expiration.getTime());
        contentRepository.save(content);
        return true;
    }

    /** Marks the content as checked out by the given user. */
    @Transactional
    public boolean checkout(RelatedContent content, User user, boolean toLocal) {
        content.setCheckoutDate(clock.getCurrentTime());
        content.setCheckedOut(true);
        content.setCheckedOutToLocal(toLocal);
        content.setCheckoutOwner(user.getId());
        contentRepository.save(content);
        return true;
    }

    /** Clears all lock state from the content. */
    @Transactional
    public boolean unlock(RelatedContent content) {
        content.setLockDate(null);
        content.setLockExpirationDate(null);
        content.setLockOwner(null);
        content.setLocked(false);
        contentRepository.save(content);
        return true;
    }

    /** Clears all checkout state from the content. */
    @Transactional
    public boolean uncheckout(RelatedContent content) {
        content.setCheckoutDate(null);
        content.setCheckedOut(false);
        content.setCheckedOutToLocal(false);
        content.setCheckoutOwner(null);
        contentRepository.save(content);
        return true;
    }

    /**
     * Checks the content back in.
     *
     * @return true when the checkout state was cleared; false when {@code keepCheckedOut} is set
     *         (in which case nothing is persisted)
     */
    @Transactional
    public boolean checkin(RelatedContent content, String comment, boolean keepCheckedOut) {
        if (!keepCheckedOut) {
            content.setCheckoutDate(null);
            content.setCheckedOut(false);
            content.setCheckoutOwner(null);
            // TODO: store comment
            contentRepository.save(content);
            return true;
        }
        return false;
    }

    /** Replaces the stored binary data of an existing content entity and updates its metadata. */
    @Transactional
    public void updateRelatedContentData(Long relatedContentId, String contentStoreId, InputStream contentStream, Long lengthHint, User user) {
        Date timestamp = clock.getCurrentTime();
        ContentObject updatedContent = contentStorage.updateContentObject(contentStoreId, contentStream, lengthHint);
        RelatedContent relatedContent = contentRepository.findOne(relatedContentId);
        relatedContent.setLastModifiedBy(user.getId());
        relatedContent.setLastModified(timestamp);
        // Record the actual stored length rather than the caller-provided hint, consistent with
        // createRelatedContent(); previously the hint (possibly null/inaccurate) was stored and
        // the ContentObject returned by the store was ignored.
        relatedContent.setContentSize(updatedContent.getContentLength());
        contentRepository.save(relatedContent);
    }

    /** Renames a content entity. */
    @Transactional
    public void updateName(Long relatedContentId, String newName) {
        RelatedContent relatedContent = contentRepository.findOne(relatedContentId);
        relatedContent.setName(newName);
        contentRepository.save(relatedContent);
    }

    /**
     * Marks a piece of content as permanent and flags it being used as selected content in the given field,
     * for the given process instance id and (optional) task id.
     */
    @Transactional
    public void setContentField(Long relatedContentId, String field, String processInstanceId, String taskId) {
        final RelatedContent relatedContent = contentRepository.findOne(relatedContentId);
        relatedContent.setProcessInstanceId(processInstanceId);
        relatedContent.setTaskId(taskId);
        relatedContent.setRelatedContent(false);
        relatedContent.setField(field);
        contentRepository.save(relatedContent);
    }

    /** Persists the given entity as-is. */
    @Transactional
    public void storeRelatedContent(RelatedContent relatedContent) {
        contentRepository.save(relatedContent);
    }

    public ContentStorage getContentStorage() {
        return contentStorage;
    }

    /**
     * Deletes all content related to the given process instance. This includes all field content for a process instance, all
     * field content on tasks and all related content on tasks. The raw content data will also be removed from content storage
     * as well as all renditions and rendition data.
     */
    @Transactional
    public void deleteContentForProcessInstance(String processInstanceId) {
        int page = 0;
        Page<RelatedContent> content = contentRepository.findAllContentByProcessInstanceId(
                processInstanceId, new PageRequest(page, RELATED_CONTENT_INTERNAL_BATCH_SIZE));
        final Set<String> storageIds = new HashSet<String>();
        // Loop over all content, cascading any referencing entities
        while (content != null) {
            for (RelatedContent relatedContent : content.getContent()) {
                if (relatedContent.getContentStoreId() != null) {
                    storageIds.add(relatedContent.getContentStoreId());
                }
            }
            // Get next page, if needed
            if (!content.isLast()) {
                page++;
                content = contentRepository.findAllContentByProcessInstanceId(
                        processInstanceId, new PageRequest(page, RELATED_CONTENT_INTERNAL_BATCH_SIZE));
            } else {
                content = null;
            }
        }
        // Delete raw content AFTER transaction has been committed to prevent missing content on rollback
        if (!storageIds.isEmpty()) {
            TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronizationAdapter() {
                @Override
                public void afterCommit() {
                    for (String id : storageIds) {
                        contentStorage.deleteContentObject(id);
                    }
                }
            });
        }
        // Batch delete all RelatedContent entities
        contentRepository.deleteAllContentByProcessInstanceId(processInstanceId);
    }
}
| |
package Graphic;
import java.awt.Color;
import java.awt.EventQueue;
import java.awt.Toolkit;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JButton;
import javax.swing.JTextField;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.awt.event.WindowEvent;
import java.rmi.ServerException;
import java.util.ArrayList;
import java.util.Locale;
import java.util.Date;
import java.awt.Font;
import javax.swing.ImageIcon;
import javax.swing.JOptionPane;
import javax.swing.JTextArea;
import javax.swing.SwingConstants;
import javax.swing.JLabel;
import javax.swing.JScrollPane;
import controllers.Application;
import controllers.GroupController;
import controllers.MyTripController;
import controllers.TripController;
import domain.ControllerNotLoadedException;
import domain.Session;
import domain.SessionNotActiveException;
import domain.UserNameAlreadyExistsException;
/**
 * Swing window for creating a new trip or viewing an existing one.
 *
 * <p>The {@code i} mode flag controls behaviour: 0 = create a new trip, 1 = create/continue
 * with existing data, 2 = read-only view of an existing trip. Labels can be toggled between
 * Spanish (the default) and English via the two flag buttons.
 *
 * <p>Fix applied: the date-validation condition used a non-short-circuit bitwise {@code &}
 * between boolean comparisons; replaced with {@code &&} (same grouping, same result).
 */
public class Viaje extends JFrame {

    private static final long serialVersionUID = -8548069016778574983L;
    // NOTE(review): `panel` is static yet reassigned per instance — looks accidental; confirm
    // before changing, as other code may rely on the shared reference.
    private static JPanel panel;
    private ObservingTextField tFLeaving;
    private ObservingTextField tFArriving;
    private JTextField tFStatus;
    private DatePicker dp1;
    private JTextField tFFrom;
    private JTextField tFTo;
    private JTextField tFCost;
    private JTextArea textArea;

    /**
     * Launch the application.
     */
    public static void main(String[] args) {
        EventQueue.invokeLater(new Runnable() {
            public void run() {
                try {
                    Viaje frame = new Viaje(1, null, null, null, null, null, null);
                    frame.setVisible(true);
                    frame.pack();
                    frame.setSize(900, 602);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Create the frame.
     *
     * @param i mode flag: 0 = create, 1 = create/continue, 2 = read-only view
     * @param trip controller of the trip being displayed (modes 1/2), may be null
     * @param myTrip controller of the user's own trip, may be null
     * @param aux auxiliary data forwarded to the Grupo frame, may be null
     * @param instance application facade used to register the trip, may be null
     * @param session current user session, forwarded to the Grupo frame
     * @param groupController controller forwarded to the Grupo frame
     */
    public Viaje(final Integer i, final TripController trip, final MyTripController myTrip, final ArrayList<String> aux, final Application instance, final Session session, final GroupController groupController) {
        panel = new ImagePanel(new ImageIcon("Trip.jpg").getImage());
        setContentPane(panel);
        setTitle("TreckApp");
        setBounds(0, 0, 766, 616);
        setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        setResizable(false);
        panel.setEnabled(true);
        panel.setLayout(null);
        final JButton btnNewButton = new JButton();
        tFLeaving = new ObservingTextField();
        tFLeaving.setEnabled(false);
        tFLeaving.setDisabledTextColor(Color.BLACK);
        tFLeaving.setHorizontalAlignment(SwingConstants.CENTER);
        tFLeaving.setFont(new Font("Tahoma", Font.PLAIN, 15));
        tFLeaving.setBounds(176, 77, 106, 28);
        panel.add(tFLeaving);
        tFLeaving.setColumns(10);
        // "Leaving on" button: opens a date picker in editable modes (0/1); otherwise the
        // button disables itself on first click.
        btnNewButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                if (i == 1 || i == 0) {
                    String lang = null;
                    final Locale locale = getLocale(lang);
                    DatePicker dp = new DatePicker(tFLeaving, locale);
                    Date selectedDate = dp.parseDate(tFLeaving.getText());
                    dp.setSelectedDate(selectedDate);
                    dp.start(tFLeaving);
                } else {
                    btnNewButton.setEnabled(false);
                }
            }
        });
        btnNewButton.setBounds(36, 77, 121, 28);
        panel.add(btnNewButton);
        final JLabel lblTrip = new JLabel();
        lblTrip.setHorizontalAlignment(SwingConstants.CENTER);
        lblTrip.setFont(new Font("Tahoma", Font.BOLD, 27));
        lblTrip.setForeground(Color.WHITE);
        lblTrip.setBounds(390, 20, 92, 35);
        panel.add(lblTrip);
        tFArriving = new ObservingTextField();
        tFArriving.setDisabledTextColor(Color.BLACK);
        tFArriving.setHorizontalAlignment(SwingConstants.CENTER);
        tFArriving.setFont(new Font("Tahoma", Font.PLAIN, 15));
        tFArriving.setEnabled(false);
        tFArriving.setColumns(10);
        tFArriving.setBounds(176, 123, 106, 28);
        panel.add(tFArriving);
        // "Returning on" button: same behaviour as the leaving-date button.
        final JButton button = new JButton();
        button.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                if (i == 1 || i == 0) {
                    String lang = null;
                    final Locale locale = getLocale(lang);
                    dp1 = new DatePicker(tFArriving, locale);
                    Date selectedDate = dp1.parseDate(tFArriving.getText());
                    dp1.setSelectedDate(selectedDate);
                    dp1.start(tFArriving);
                } else {
                    button.setEnabled(false);
                }
            }
        });
        button.setBounds(36, 123, 121, 28);
        panel.add(button);
        final JLabel lblState = new JLabel();
        lblState.setFont(new Font("Tahoma", Font.BOLD, 17));
        lblState.setForeground(Color.WHITE);
        lblState.setBounds(412, 87, 180, 35);
        panel.add(lblState);
        tFStatus = new JTextField();
        tFStatus.setDisabledTextColor(Color.BLACK);
        tFStatus.setEnabled(false);
        tFStatus.setFont(new Font("Tahoma", Font.PLAIN, 17));
        tFStatus.setBounds(602, 87, 138, 35);
        panel.add(tFStatus);
        tFStatus.setColumns(10);
        // "Ready" button: validates the form, then registers the trip and/or opens Grupo.
        final JButton btnReady = new JButton();
        btnReady.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                Integer flag = 0;
                Integer day1 = null;
                Integer day2 = null;
                Integer month1 = null;
                Integer month2 = null;
                Integer year1 = null;
                Integer year2 = null;
                // flag=1: no error
                // flag=2: invalid date input
                // flag=3: no origin city entered
                // flag=4: no destination city entered
                // flag=5: no estimated trip cost entered
                // flag=6: the estimated trip cost is not a valid number
                // flag=7: no trip description entered
                try {
                    // Dates are expected as fixed-position text: dd?MM?yy (two-digit year).
                    day1 = Integer.parseInt(tFLeaving.getText().substring(0, 2));
                    day2 = Integer.parseInt(tFArriving.getText().substring(0, 2));
                    month1 = Integer.parseInt(tFLeaving.getText().substring(3, 5));
                    month2 = Integer.parseInt(tFArriving.getText().substring(3, 5));
                    year1 = Integer.parseInt(tFLeaving.getText().substring(6, 8));
                    year2 = Integer.parseInt(tFArriving.getText().substring(6, 8));
                    // Fixed: was `month1 == month2 & year1 == year2` — non-short-circuit
                    // bitwise & between booleans. && keeps identical grouping and result.
                    if (day1 > day2 && month1 == month2 && year1 == year2 || month1 > month2 && year1 == year2 || year1 > year2) {
                        flag = 2;
                    } else if (tFFrom.getText().isEmpty()) {
                        flag = 3;
                    } else if (tFTo.getText().isEmpty()) {
                        flag = 4;
                    } else if (tFCost.getText().isEmpty()) {
                        flag = 5;
                    } else if (!isNumeric(tFCost.getText())) {
                        flag = 6;
                    } else if (textArea.getText().isEmpty()) {
                        flag = 7;
                    } else {
                        flag = 1;
                    }
                } catch (Exception e1) {
                    System.err.println("No introdujo fechas");
                }
                if (instance != null) {
                    switch (flag) {
                    case 1:
                        MyTripController viaje = null;
                        if (i == 0) {
                            // NOTE(review): deprecated Date(int,int,int) ctor expects
                            // year-1900 and a 0-based month, but the parsed values are a
                            // two-digit year and 1-based month. Behaviour kept as-is;
                            // confirm intended semantics with registerTrip before changing.
                            Date dateL = new Date(year1, month1, day1);
                            Date dateA = new Date(year2, month2, day2);
                            try {
                                viaje = instance.registerTrip(dateL, dateA, Double.parseDouble(tFCost.getText()), textArea.getText(), tFTo.getText(), tFFrom.getText());
                            } catch (ServerException e1) {
                                e1.printStackTrace();
                            } catch (NumberFormatException e1) {
                                e1.printStackTrace();
                            } catch (UserNameAlreadyExistsException e1) {
                                e1.printStackTrace();
                            } catch (SessionNotActiveException e1) {
                                e1.printStackTrace();
                            }
                            Grupo frame = new Grupo(0, viaje, null, aux, instance, session, groupController);
                            frame.setVisible(true);
                            frame.pack();
                            frame.setSize(900, 602);
                            close();
                        } else if (i == 1) {
                            Grupo frame = new Grupo(1, viaje, null, null, instance, session, groupController);
                            frame.setVisible(true);
                            frame.pack();
                            frame.setSize(900, 602);
                            close();
                        } else if (i == 2) {
                            Grupo frame = new Grupo(2, null, trip, null, instance, session, groupController);
                            frame.setVisible(true);
                            frame.pack();
                            frame.setSize(900, 602);
                            close();
                        }
                        break;
                    case 2:
                        JOptionPane.showMessageDialog(null, "No introdujo una fecha correcta", "ERROR", JOptionPane.ERROR_MESSAGE);
                        break;
                    case 3:
                        JOptionPane.showMessageDialog(null, "No introdujo una ciudad de origen", "ERROR", JOptionPane.ERROR_MESSAGE);
                        break;
                    case 4:
                        JOptionPane.showMessageDialog(null, "No introdujo una ciudad de finalizacion", "ERROR", JOptionPane.ERROR_MESSAGE);
                        break;
                    case 5:
                        JOptionPane.showMessageDialog(null, "No introdujo un costo estimado del viaje", "ERROR", JOptionPane.ERROR_MESSAGE);
                        break;
                    case 6:
                        JOptionPane.showMessageDialog(null, "No introdujo un costo estimado del viaje correcto", "ERROR", JOptionPane.ERROR_MESSAGE);
                        break;
                    case 7:
                        JOptionPane.showMessageDialog(null, "No introdujo una descripcion del viaje que quiere crear", "ERROR", JOptionPane.ERROR_MESSAGE);
                        break;
                    default:
                        JOptionPane.showMessageDialog(null, "No introdujo datos obligatorios", "ERROR", JOptionPane.ERROR_MESSAGE);
                        break;
                    }
                }
            }
        });
        btnReady.setBounds(651, 544, 89, 23);
        panel.add(btnReady);
        final JLabel lblDescription = new JLabel();
        lblDescription.setForeground(Color.WHITE);
        lblDescription.setFont(new Font("Tahoma", Font.BOLD, 17));
        lblDescription.setBounds(25, 379, 246, 35);
        panel.add(lblDescription);
        JScrollPane scrollPane = new JScrollPane(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
                JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
        scrollPane.setBounds(245, 369, 472, 119);
        panel.add(scrollPane);
        textArea = new JTextArea();
        scrollPane.setViewportView(textArea);
        textArea.setFont(new Font("Tahoma", Font.PLAIN, 13));
        textArea.setLineWrap(true);
        textArea.setWrapStyleWord(true);
        final JLabel lblFrom = new JLabel();
        lblFrom.setForeground(Color.WHITE);
        lblFrom.setFont(new Font("Tahoma", Font.BOLD, 17));
        lblFrom.setBounds(26, 212, 180, 35);
        panel.add(lblFrom);
        tFFrom = new JTextField();
        tFFrom.setBounds(253, 222, 134, 20);
        panel.add(tFFrom);
        tFFrom.setColumns(10);
        final JLabel lblTo = new JLabel();
        lblTo.setForeground(Color.WHITE);
        lblTo.setFont(new Font("Tahoma", Font.BOLD, 17));
        lblTo.setBounds(26, 267, 211, 35);
        panel.add(lblTo);
        final JLabel lbltrip = new JLabel();
        lbltrip.setForeground(Color.WHITE);
        lbltrip.setFont(new Font("Tahoma", Font.BOLD, 17));
        lbltrip.setBounds(79, 298, 106, 35);
        panel.add(lbltrip);
        tFTo = new JTextField();
        tFTo.setBounds(253, 293, 134, 20);
        panel.add(tFTo);
        tFTo.setColumns(10);
        final JLabel label = new JLabel();
        label.setFont(new Font("Tahoma", Font.BOLD, 17));
        label.setForeground(Color.WHITE);
        label.setBounds(224, 283, 25, 35);
        panel.add(label);
        final JLabel lblCost = new JLabel();
        lblCost.setForeground(Color.WHITE);
        lblCost.setFont(new Font("Tahoma", Font.BOLD, 17));
        lblCost.setBounds(452, 253, 180, 35);
        panel.add(lblCost);
        tFCost = new JTextField();
        tFCost.setBounds(640, 263, 86, 20);
        panel.add(tFCost);
        tFCost.setColumns(10);
        // Modes 0/1 are editable; mode 2 is read-only.
        if (i == 0 || i == 1) {
            tFFrom.setEditable(true);
            tFTo.setEditable(true);
            tFCost.setEditable(true);
            textArea.setEditable(true);
        } else if (i == 2) {
            tFFrom.setEditable(false);
            tFTo.setEditable(false);
            tFCost.setEditable(false);
            textArea.setEditable(false);
        }
        // Populate fields from an existing trip.
        // NOTE(review): only `trip` is dereferenced, so a non-null `myTrip` with a null
        // `trip` would NPE here; also tFArriving is filled from getStartDate() (not an end
        // date) — both look suspicious, confirm before changing.
        if ((trip != null || myTrip != null) && instance != null) {
            try {
                tFFrom.setText(trip.getOriginCity());
                tFTo.setText(trip.getEndCity());
                tFLeaving.setText(trip.getStartDate().toString());
                tFArriving.setText(trip.getStartDate().toString());
                tFCost.setText(" $ " + trip.getEstimateCost().toString());
                tFStatus.setText(trip.getTripStatus().getName());
                textArea.setText(trip.getTripDescription());
            } catch (SessionNotActiveException e1) {
                e1.printStackTrace();
            } catch (ControllerNotLoadedException e1) {
                e1.printStackTrace();
            }
        }
        // Spanish-flag button: switches all labels to Spanish.
        JButton img = new JButton();
        img.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                //language = 1;
                lblCost.setText("Costo Aproximado :");
                lblTo.setText("Ciudad de Finalizaci\u00F3n");
                lblFrom.setText("Ciudad de Origen : ");
                lblDescription.setText("Descripcion del Viaje : ");
                btnReady.setText("Listo");
                lblState.setText("Estado del Viaje : ");
                button.setText("Hasta");
                btnNewButton.setText("Desde");
                lblTrip.setText("Viaje");
                lbltrip.setText("del Viaje");
                label.setText(":");
                lblState.setBounds(412, 87, 180, 35);
                lblFrom.setBounds(26, 212, 180, 35);
                lblTo.setBounds(26, 267, 211, 35);
            }
        });
        ImageIcon imageS = new ImageIcon("SpanishFlag.jpg");
        panel.add(img);
        img.setIcon(imageS);
        img.setSize(22, 18);
        img.setLocation(796, 11);
        img.setVisible(true);
        // English-flag button: switches all labels to English.
        JButton img2 = new JButton();
        img2.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                //language = 2;
                lblCost.setText("Estimated cost :");
                lblTo.setText("To :");
                lblFrom.setText("From : ");
                lblDescription.setText("Trip Description : ");
                btnReady.setText("Ready");
                lblState.setText("Trip Status : ");
                button.setText("Returning on");
                btnNewButton.setText("Leaving on");
                lblTrip.setText("Trip");
                lbltrip.setText("");
                label.setText("");
                lblState.setBounds(450, 87, 180, 35);
                lblFrom.setBounds(170, 212, 180, 35);
                lblTo.setBounds(180, 285, 211, 35);
            }
        });
        ImageIcon imageE = new ImageIcon("EnglishFlag.jpg");
        panel.add(img2);
        img2.setIcon(imageE);
        img2.setSize(22, 18);
        img2.setLocation(760, 11);
        img2.setVisible(true);
        // Default language is Spanish.
        lblCost.setText("Costo Aproximado :");
        lblTo.setText("Ciudad de Finalizaci\u00F3n");
        lblFrom.setText("Ciudad de Origen : ");
        lblDescription.setText("Descripcion del Viaje : ");
        btnReady.setText("Listo");
        lblState.setText("Estado del Viaje : ");
        button.setText("Hasta");
        btnNewButton.setText("Desde");
        lblTrip.setText("Viaje");
        lbltrip.setText("del Viaje");
        label.setText(":");
    }

    /**
     * Returns a {@link Locale} for the given language code, falling back to {@link Locale#UK}
     * when the code is null or empty.
     */
    private Locale getLocale(String loc) {
        if (loc != null && loc.length() > 0) {
            return new Locale(loc);
        } else {
            return Locale.UK;
        }
    }

    /** Returns true when {@code str} parses as a double (cost-field validation). */
    public static boolean isNumeric(String str) {
        try {
            Double.parseDouble(str);
        } catch (NumberFormatException nfe) {
            return false;
        }
        return true;
    }

    /** Closes this frame by posting a WINDOW_CLOSING event on the system event queue. */
    public void close() {
        WindowEvent winClosingEvent = new WindowEvent(this, WindowEvent.WINDOW_CLOSING);
        Toolkit.getDefaultToolkit().getSystemEventQueue().postEvent(winClosingEvent);
    }
}
| |
package ${package};
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import javafx.application.Platform;
import javafx.event.EventHandler;
import javafx.geometry.Rectangle2D;
import javafx.scene.Group;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.paint.Color;
import javafx.scene.shape.LineTo;
import javafx.scene.shape.MoveTo;
import javafx.scene.shape.Path;
import javafx.scene.shape.StrokeLineCap;
import javafx.stage.Screen;
import org.wiigee.control.WiimoteWiigee;
import org.wiigee.device.Wiimote;
import org.wiigee.event.ButtonListener;
import org.wiigee.event.ButtonPressedEvent;
import org.wiigee.event.ButtonReleasedEvent;
import org.wiigee.event.GestureEvent;
import org.wiigee.event.GestureListener;
import org.wiigee.event.InfraredEvent;
import org.wiigee.event.InfraredListener;
@SuppressWarnings("restriction")
public abstract class BaseController implements EventHandler<KeyEvent>, ButtonListener, GestureListener, InfraredListener{
static Map<String, Set<String>> jarEntryCache = new HashMap<String, Set<String>>();
ImageView[] slides;
protected int index=0;
public static double SCALE=8;
protected double screenWidth;
protected double screenHeight;
public static final double SLIDE_WIDTH=200;
public static final double SLIDE_HEIGHT=150;
protected ImageView prevSlide=null;
protected AnchorPane rootPane;
protected Group slideGroup;
protected boolean videoPlayed = false;
protected PresentationHttpServer server;
protected AtomicBoolean a_pressed = new AtomicBoolean(false);
protected Path wiiTrail = new Path();
protected Wiimote wiimote = null;
protected boolean hasTitle = false;
protected int[][] coordinates;
protected double[] middle = new double[] {0, 0};
protected double[] pointer = new double[] {0, 0};
protected double lastdeltaX=0;
protected double lastdeltaY=0;
/**
 * Loads the slide images (optionally overridden with {@code -Dslides=dir}), builds the slide
 * views, wires the pointer trail, and optionally starts Wiimote support
 * ({@code -Dwiimote}) and the HTTP remote-control server ({@code -Dhttpport}).
 *
 * <p>Fix: slide views previously called {@code setLayoutX} twice (the second time with
 * {@code screenHeight/2}) and never set Y; the helper now sets X and Y correctly.
 * The duplicated slide-view construction was also extracted into {@link #createSlideView}.
 *
 * @param root the pane the slide group is attached to
 * @throws IOException if slide resources cannot be read
 * @throws NumberFormatException if the {@code httpport} system property is not an integer
 * @throws NoSuchAlgorithmException propagated from resource listing
 */
public void initialize(AnchorPane root) throws NumberFormatException, IOException, NoSuchAlgorithmException {
    rootPane = root;
    // Slide directory defaults to "slides/" and is normalized to end with '/'.
    String slideDir = "slides/";
    if (System.getProperty("slides") != null) {
        slideDir = System.getProperty("slides");
        if (!slideDir.endsWith("/")) {
            slideDir = slideDir + "/";
        }
    }
    List<String> slideNames = new ArrayList<String>(Arrays.asList(Utils.getResourceListing(slideDir)));
    Collections.sort(slideNames);
    // "title.png" is pulled out of the sorted list and re-inserted as slide 0 below.
    if (slideNames.indexOf("title.png") > -1) {
        slideNames.remove(slideNames.indexOf("title.png"));
        hasTitle = true;
    }
    ArrayList<ImageView> slideList = new ArrayList<>();
    Rectangle2D primaryScreenBounds = Screen.getPrimary().getVisualBounds();
    screenWidth = primaryScreenBounds.getMaxX();
    screenHeight = primaryScreenBounds.getMaxY();
    for (String next : slideNames) {
        if (next.isEmpty() || next.equals("/")) continue;
        if (next.endsWith(".video")) {
            // A ".video" descriptor attaches playback metadata to the slide created just
            // before it. NOTE(review): a .video file sorted before any image slide would
            // throw IndexOutOfBoundsException here — behaviour unchanged from the original.
            ImageView nextView = slideList.get(slideList.size() - 1);
            try (BufferedReader in =
                    new BufferedReader(new InputStreamReader(Utils.getResource(slideDir + next)))) {
                String video = in.readLine();
                in.readLine(); // unused line in the descriptor format
                String left = in.readLine();
                String top = in.readLine();
                String width = in.readLine();
                String height = in.readLine();
                // Copy the video out of the resources into a temp file so it can be played.
                File tmp = File.createTempFile(video, null);
                tmp.deleteOnExit();
                InputStream videoStream = Utils.getResource("html/" + video);
                Files.copy(videoStream, Paths.get(tmp.toURI()), StandardCopyOption.REPLACE_EXISTING);
                nextView.getProperties().put("video", tmp);
                nextView.getProperties().put("left", left);
                nextView.getProperties().put("top", top);
                nextView.getProperties().put("width", width);
                nextView.getProperties().put("height", height);
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else {
            slideList.add(createSlideView(slideDir + next));
        }
    }
    if (hasTitle) {
        slideList.add(0, createSlideView(slideDir + "title.png"));
    }
    slides = slideList.toArray(new ImageView[slideList.size()]);
    slideGroup = new Group(slides);
    slideGroup.getChildren().get(0).toFront();
    rootPane.setLayoutX(0);
    rootPane.setLayoutY(0);
    rootPane.setVisible(true);
    rootPane.setFocusTraversable(true);
    rootPane.getChildren().add(slideGroup);
    // Pointer trail: restarts on every move unless the A button is held, in which case the
    // trail accumulates (drawing mode).
    rootPane.setOnMouseMoved(new EventHandler<MouseEvent>() {
        @Override
        public void handle(MouseEvent event) {
            double x = event.getX();
            double y = event.getY();
            if (!a_pressed.get()) {
                wiiTrail.getElements().clear();
                wiiTrail.getElements().add(new MoveTo(x, y));
                wiiTrail.getElements().add(new LineTo(x + 1, y));
            } else {
                wiiTrail.getElements().add(new LineTo(x, y));
            }
            wiiTrail.toFront();
        }
    });
    SCALE = screenWidth / SLIDE_WIDTH;
    rootPane.setStyle("-fx-background-color: #ffffff;");
    // Center every slide thumbnail on screen.
    for (ImageView next : slides) {
        next.setLayoutX(screenWidth / 2 - SLIDE_WIDTH / 2);
        next.setLayoutY(screenHeight / 2 - SLIDE_HEIGHT / 2);
    }
    if (System.getProperty("wiimote") != null) {
        initWiimote();
    }
    if (System.getProperty("httpport") != null) {
        server = new PresentationHttpServer(Integer.parseInt(System.getProperty("httpport")), this);
    }
}

/**
 * Builds a thumbnail-sized ImageView for one slide image, roughly centered on screen.
 * (Fixes the original copy-paste bug where setLayoutX was called twice and Y never set;
 * the final position is reassigned in initialize() anyway.)
 */
private ImageView createSlideView(String imagePath) {
    ImageView view = new ImageView();
    view.setFitWidth(SLIDE_WIDTH);
    view.setFitHeight(SLIDE_HEIGHT);
    view.setPreserveRatio(true);
    view.setFocusTraversable(false);
    view.setLayoutX(screenWidth / 2);
    view.setLayoutY(screenHeight / 2);
    view.setImage(new Image(imagePath));
    return view;
}
/** Delegates a JavaFX keyboard event to the shared {@link KeyCode} handler. */
public void handle(KeyEvent event) {
    handle(event.getCode());
}
/**
 * Queues a key-code action on the JavaFX application thread. Used by the
 * Wiimote listener callbacks, which fire on a non-FX thread.
 */
public void handleLater(final KeyCode code) {
    Platform.runLater(new Runnable() {
        @Override
        public void run() {
            handle(code);
        }
    });
}
/**
 * Core navigation: translates a key code into a slide-index change and shows
 * the resulting slide. LEFT/RIGHT step one slide, UP/DOWN step with the
 * "quick" flag set, A jumps to the first slide and E to the last. The
 * Wiimote (if present) vibrates briefly on success and longer when the
 * start or end of the deck is hit. Synchronizes on {@code slides} so
 * HTTP-/Wiimote-driven changes and {@link #pollSlideChange} stay consistent.
 */
public void handle(KeyCode code) {
    synchronized (slides) {
        boolean quick = false;
        switch (code) {
        case LEFT:
            index--;
            break;
        case RIGHT:
            index++;
            break;
        case UP:
            index++;
            quick = true;
            break;
        case DOWN:
            index--;
            quick = true;
            break;
        case A:
            index = 0;
            break;
        case E:
            index = slides.length - 1;
            break;
        default:
            return; // unmapped key: no navigation
        }
        if (index < 0) {
            index = 0;
            tryVibrate(1000); // long pulse: already at the first slide
            return;
        } else if (index >= slides.length) {
            index = slides.length - 1;
            tryVibrate(1500); // longer pulse: already at the last slide
            return;
        } else {
            tryVibrate(150); // short confirmation pulse
        }
        showSlide(index, quick);
    }
}
/** @return the total number of slides in the deck (thread-safe). */
public int slideCount() {
    synchronized (slides) {
        return slides.length;
    }
}
/** @return the zero-based index of the currently shown slide (thread-safe). */
public int currentSlide() {
    synchronized (slides) {
        return index;
    }
}
/** Shows slide {@code index} with the default (non-quick) transition. */
public void showSlide(int index) {
    synchronized (slides) {
        showSlide(index, false);
    }
}
/**
 * Long-poll helper (used by the HTTP remote control): blocks up to
 * {@code timeout} milliseconds waiting for the current slide to move away
 * from {@code fromSlide}.
 *
 * @param timeout maximum wait in milliseconds
 * @param fromSlide the slide index the caller last saw
 * @return the (possibly zero) offset of the current slide from {@code fromSlide}
 */
public int pollSlideChange(long timeout, int fromSlide) {
    synchronized (slides) {
        if (fromSlide != currentSlide()) {
            return currentSlide() - fromSlide;
        }
        try {
            // Woken by showSlide(int, boolean), which calls slides.notifyAll().
            slides.wait(timeout);
        } catch (InterruptedException e) {}
        // NOTE(review): the interrupt status is swallowed; an interrupt is
        // indistinguishable from a timeout for callers — confirm intended.
        return currentSlide() - fromSlide;
    }
}
/**
 * Sets the current slide index and asynchronously renders that slide on
 * the FX application thread, then wakes any {@link #pollSlideChange}
 * waiters.
 *
 * @param index target slide index
 * @param quick true to skip the transition animation
 * @throws ArrayIndexOutOfBoundsException if {@code index} is out of range
 */
public void showSlide(final int index, final boolean quick) {
    synchronized (slides) {
        if (index >= slides.length || index < 0) throw new ArrayIndexOutOfBoundsException(index);
        this.index = index;
        Platform.runLater(new Runnable() {
            @Override
            public void run() {
                synchronized (slides) {
                    showSlide(slides[index], quick);
                    slides.notifyAll(); // wake pollSlideChange() waiters
                }
            }
        });
    }
}
public abstract void showSlide(final ImageView slide, boolean quick);
/**
 * Folds an angle reading in degrees (0..360) into a signed rotation:
 * values above 180 map to the positive complement (360 - angle), all
 * others to their negation.
 *
 * @param angle angle in degrees
 * @return equivalent signed rotation in degrees
 */
public static double getRotation(double angle) {
    return (angle > 180) ? (360 - angle) : -angle;
}
/**
 * Connects to a Wiimote via wiigee, loads the bundled gesture training set
 * (copied from the classpath into temp files so wiigee can read them from
 * the filesystem), and registers this object for button, gesture and
 * infrared events. Failure is deliberately non-fatal: the presentation
 * still works with the keyboard.
 */
private void initWiimote() {
    try {
        WiimoteWiigee wiigee = new WiimoteWiigee();
        wiimote = wiigee.getDevice();
        wiigee.setTrainButton(Wiimote.BUTTON_1);
        wiigee.setRecognitionButton(Wiimote.BUTTON_B);
        wiigee.setCloseGestureButton(Wiimote.BUTTON_HOME);
        for (String nextGesture : new String[] {"right", "left", "up", "down", "circleCW", "circleCCW" }) {
            File tmp = File.createTempFile("gesture_" + nextGesture, ".txt");
            tmp.deleteOnExit();
            InputStream gestureStream = Utils.getResource("gestureset/" + nextGesture + ".txt") ;
            Files.copy(gestureStream , Paths.get(tmp.toURI()), StandardCopyOption.REPLACE_EXISTING);
            // NOTE(review): strips the ".txt" suffix before handing the path to
            // loadGesture — presumably wiigee re-appends the extension; confirm.
            String gestureName = tmp.getAbsolutePath().substring(0, tmp.getAbsolutePath().length() - 4);
            wiimote.loadGesture(gestureName);
        }
        wiimote.fireButtonPressedEvent(Wiimote.BUTTON_HOME);
        wiimote.addButtonListener(this);
        wiimote.addGestureListener(this);
        wiimote.addInfraredListener(this);
        wiimote.setWiiMotionPlusEnabled(true);
        wiimote.setInfraredCameraEnabled(true);
        // Laser-pointer style trail drawn while the A button is held.
        wiiTrail.setStroke(Color.KHAKI);
        wiiTrail.setOpacity(0.6);
        wiiTrail.setStrokeWidth(10);
        wiiTrail.setStrokeLineCap(StrokeLineCap.ROUND);
        tryVibrate(100);
        rootPane.getChildren().add(wiiTrail);
    } catch (Throwable e) {
        //Wiimote init failing is not fatal, but I'd like to know about it
        e.printStackTrace();
    }
}
/**
 * Maps Wiimote button presses (reported as bit masks) to navigation
 * actions. Masks 256/512 step left/right, 2048/1024 step up/down (quick),
 * 16 jumps to the first slide, 1 to the last; mask 8 enables the pointer
 * trail drawing mode while held (cleared again in
 * {@link #buttonReleaseReceived}).
 */
@Override
public void buttonPressReceived(ButtonPressedEvent event) {
    System.out.printf("Button pressed %d\n", event.getButton());
    switch (event.getButton()) {
    case 256:
        handleLater(KeyCode.LEFT);
        break;
    case 512:
        handleLater(KeyCode.RIGHT);
        break;
    case 2048:
        handleLater(KeyCode.UP);
        break;
    case 1024:
        handleLater(KeyCode.DOWN);
        break;
    case 16:
        handleLater(KeyCode.A);
        break;
    case 1:
        handleLater(KeyCode.E);
        // FIX: missing break caused fall-through into case 8, which
        // spuriously enabled the trail-drawing mode on every "E" press.
        break;
    case 8:
        a_pressed.set(true);
        break;
    default:
        break;
    }
}
/**
 * Tracks release of the trail-drawing button (mask 8): releasing it ends
 * the pointer-trail mode started in {@link #buttonPressReceived}.
 */
@Override
public void buttonReleaseReceived(ButtonReleasedEvent event) {
    System.out.printf("Button released %d\n", event.getButton());
    // Only button mask 8 is of interest; all other releases are ignored.
    if (event.getButton() == 8) {
        a_pressed.set(false);
    }
}
/**
 * Maps recognized wiigee gesture ids (0..5) onto the same key codes used
 * for button navigation; unknown ids are ignored.
 */
@Override
public void gestureReceived(GestureEvent event) {
    // Index i holds the key code for gesture id i (right, left, up, down, circleCW, circleCCW).
    final KeyCode[] gestureKeys = {
        KeyCode.RIGHT, KeyCode.LEFT, KeyCode.UP, KeyCode.DOWN, KeyCode.A, KeyCode.E
    };
    int id = event.getId();
    if (id >= 0 && id < gestureKeys.length) {
        handleLater(gestureKeys[id]);
    }
}
private long eventCount=0;
/**
 * Wiimote IR camera callback: derives a pointer position from the two
 * sensor-bar dots. Raw camera coordinates appear to span 0..1023 x 0..767,
 * with a coordinate of 1023 or more meaning "dot not in view" — TODO
 * confirm against the wiigee API. Only every 5th event is processed to
 * limit the update rate.
 */
@Override
public void infraredReceived(InfraredEvent event) {
    eventCount++;
    if ((eventCount % 5) != 0) {
        return; // throttle: handle every 5th IR sample only
    }
    this.coordinates = event.getCoordinates();
    int x1 = this.coordinates[0][0];
    int y1 = this.coordinates[0][1];
    int x2 = this.coordinates[1][0];
    int y2 = this.coordinates[1][1];
    // calculate pointing direction
    if (x1 < 1023 && x2 < 1023) {
        // middle in view, used for pointer calculation
        double dx = x2 - x1;
        double dy = y2 - y1;
        this.middle[0] = x1 + (dx / 2);
        this.middle[1] = y1 + (dy / 2);
        // Mirror the camera coordinates to get the pointed-at position.
        this.pointer[0] = 1024 - this.middle[0];
        this.pointer[1] = 768 - this.middle[1];
        // Remember the dot spacing for extrapolation when one dot drops out.
        this.lastdeltaX = dx;
        this.lastdeltaY = dy;
    } else if (x1 < 1023 && x2 >= 1023) {
        // middle not in view, P1 in view — extrapolate using the last spacing
        this.pointer[0] = 1024 - x1 - (int) (this.lastdeltaX * 0.5);
        this.pointer[1] = 768 - y1 - (int) (this.lastdeltaY * 0.5);
    } else if (x1 >= 1023 && x2 < 1023) {
        // middle not in view, P2 in view
        this.pointer[0] = 1024 - x2 + (int) (this.lastdeltaX * 0.5);
        this.pointer[1] = 768 - y2 + (int) (this.lastdeltaY * 0.5);
    }
    updateRobotMouse();
}
/**
 * Converts the IR pointer position to screen coordinates and injects a
 * synthetic MOUSE_MOVED event into the root pane on the FX thread (which
 * drives the trail drawing in the rootPane mouse-moved handler).
 */
private void updateRobotMouse() {
    try {
        double x = pointer[0] * screenWidth / 1024;
        double y = (768 - pointer[1]) * screenHeight / 768;
        // impl_mouseEvent is a non-public JavaFX API (hence the deprecation
        // suppression) — this JavaFX version offers no public constructor
        // for synthesizing MouseEvents.
        @SuppressWarnings("deprecation")
        final MouseEvent e = MouseEvent.impl_mouseEvent(x, y, x, y, MouseButton.NONE, 0, false, false, false, false, false, false, false, false, false, MouseEvent.MOUSE_MOVED);
        Platform.runLater(new Runnable() {
            @Override
            public void run() {
                MouseEvent.fireEvent(rootPane, e);
            }
        });
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
/**
 * Best-effort rumble feedback: vibrates the Wiimote for the given duration
 * if one is connected; any failure is logged and otherwise ignored.
 *
 * @param millis vibration duration in milliseconds
 */
protected void tryVibrate(long millis) {
    if (wiimote == null) {
        return; // no remote connected — vibration is optional feedback
    }
    try {
        wiimote.vibrateForTime(millis);
    } catch (Throwable e) {
        e.printStackTrace();
    }
}
/** Stops the embedded HTTP remote-control server, if one was started. */
public void quit() {
    if (server != null) {
        server.quit();
    }
}
}
| |
package org.jabref.gui;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.swing.SwingUtilities;
import org.jabref.JabRefExecutorService;
import org.jabref.JabRefGUI;
import org.jabref.gui.DuplicateResolverDialog.DuplicateResolverResult;
import org.jabref.gui.DuplicateResolverDialog.DuplicateResolverType;
import org.jabref.gui.undo.NamedCompound;
import org.jabref.gui.undo.UndoableInsertEntry;
import org.jabref.gui.undo.UndoableRemoveEntry;
import org.jabref.gui.worker.CallBack;
import org.jabref.logic.l10n.Localization;
import org.jabref.model.DuplicateCheck;
import org.jabref.model.entry.BibEntry;
import spin.Spin;
/**
 * Background task that scans the open database for (suspected) duplicate
 * entries and walks the user through resolving each pair.
 *
 * A worker thread ({@link SearcherRunnable}) performs the pairwise
 * comparison and feeds candidate pairs into {@code duplicates}; this
 * runnable consumes them, asks the user via a modal dialog (marshalled by
 * Spin), and finally applies all removals and merges on the Swing EDT as
 * one undoable compound edit.
 */
public class DuplicateSearch implements Runnable {
    private final BasePanel panel;
    private List<BibEntry> bes;
    // Producer/consumer buffer shared with SearcherRunnable; also serves as
    // the monitor for wait/notify between the two threads.
    private final List<List<BibEntry>> duplicates = new ArrayList<>();

    public DuplicateSearch(BasePanel bp) {
        panel = bp;
    }

    @Override
    public void run() {
        panel.output(Localization.lang("Searching for duplicates..."));
        bes = panel.getDatabase().getEntries();
        if (bes.size() < 2) {
            return; // fewer than two entries: nothing to compare
        }
        SearcherRunnable st = new SearcherRunnable();
        JabRefExecutorService.INSTANCE.executeInterruptableTask(st, "DuplicateSearcher");
        int current = 0;
        final List<BibEntry> toRemove = new ArrayList<>();
        final List<BibEntry> toAdd = new ArrayList<>();
        int duplicateCounter = 0;
        boolean autoRemoveExactDuplicates = false;
        synchronized (duplicates) {
            // Consume candidate pairs as the searcher thread produces them.
            while (!st.finished() || (current < duplicates.size())) {
                if (current >= duplicates.size()) {
                    // wait until the search thread puts something into duplicates vector
                    // or finish its work
                    try {
                        duplicates.wait();
                    } catch (InterruptedException ignored) {
                        // Ignore
                    }
                } else { // duplicates found
                    List<BibEntry> be = duplicates.get(current);
                    current++;
                    // Skip pairs whose members were already scheduled for removal.
                    if (!toRemove.contains(be.get(0)) && !toRemove.contains(be.get(1))) {
                        // Check if they are exact duplicates:
                        boolean askAboutExact = false;
                        if (DuplicateCheck.compareEntriesStrictly(be.get(0), be.get(1)) > 1) {
                            if (autoRemoveExactDuplicates) {
                                // User previously chose to auto-remove exact duplicates.
                                toRemove.add(be.get(1));
                                duplicateCounter++;
                                continue;
                            }
                            askAboutExact = true;
                        }
                        DuplicateCallBack cb = new DuplicateCallBack(JabRefGUI.getMainFrame(), be.get(0), be.get(1),
                                askAboutExact ? DuplicateResolverType.DUPLICATE_SEARCH_WITH_EXACT : DuplicateResolverType.DUPLICATE_SEARCH);
                        // Spin shows the dialog on the EDT and blocks here until it closes.
                        ((CallBack) Spin.over(cb)).update();
                        duplicateCounter++;
                        DuplicateResolverResult answer = cb.getSelected();
                        if ((answer == DuplicateResolverResult.KEEP_LEFT)
                                || (answer == DuplicateResolverResult.AUTOREMOVE_EXACT)) {
                            toRemove.add(be.get(1));
                            if (answer == DuplicateResolverResult.AUTOREMOVE_EXACT) {
                                autoRemoveExactDuplicates = true; // Remember choice
                            }
                        } else if (answer == DuplicateResolverResult.KEEP_RIGHT) {
                            toRemove.add(be.get(0));
                        } else if (answer == DuplicateResolverResult.BREAK) {
                            st.setFinished(); // thread killing
                            current = Integer.MAX_VALUE;
                            duplicateCounter--; // correct counter
                        } else if (answer == DuplicateResolverResult.KEEP_MERGE) {
                            toRemove.addAll(be);
                            toAdd.add(cb.getMergedEntry());
                        }
                    }
                }
            }
        }
        final NamedCompound ce = new NamedCompound(Localization.lang("duplicate removal"));
        final int dupliC = duplicateCounter;
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                // Now, do the actual removal:
                if (!toRemove.isEmpty()) {
                    for (BibEntry entry : toRemove) {
                        panel.getDatabase().removeEntry(entry);
                        ce.addEdit(new UndoableRemoveEntry(panel.getDatabase(), entry, panel));
                    }
                    panel.markBaseChanged();
                }
                // and adding merged entries:
                if (!toAdd.isEmpty()) {
                    for (BibEntry entry : toAdd) {
                        panel.getDatabase().insertEntry(entry);
                        ce.addEdit(new UndoableInsertEntry(panel.getDatabase(), entry, panel));
                    }
                    panel.markBaseChanged();
                }
                synchronized (duplicates) {
                    panel.output(Localization.lang("Duplicates found") + ": " + duplicates.size() + ' '
                            + Localization.lang("pairs processed") + ": " + dupliC);
                }
                ce.end();
                panel.getUndoManager().addEdit(ce);
            }
        });
    }

    /** Worker that compares every entry pair and pushes suspected duplicates into {@code duplicates}. */
    class SearcherRunnable implements Runnable {
        // volatile: written by the GUI thread (setFinished) and read here.
        private volatile boolean finished;

        @Override
        public void run() {
            for (int i = 0; (i < (bes.size() - 1)) && !finished; i++) {
                for (int j = i + 1; (j < bes.size()) && !finished; j++) {
                    BibEntry first = bes.get(i);
                    BibEntry second = bes.get(j);
                    boolean eq = DuplicateCheck.isDuplicate(first, second, panel.getBibDatabaseContext().getMode());
                    // If (suspected) duplicates, add them to the duplicates vector.
                    if (eq) {
                        synchronized (duplicates) {
                            duplicates.add(Arrays.asList(first, second));
                            duplicates.notifyAll(); // send wake up all
                        }
                    }
                }
            }
            finished = true;
            // if no duplicates found, the graphical thread will never wake up
            synchronized (duplicates) {
                duplicates.notifyAll();
            }
        }

        public boolean finished() {
            return finished;
        }

        // Thread cancel option
        // no synchronized used because no "really" critical situations expected
        public void setFinished() {
            finished = true;
        }
    }

    /** Callback proxied by Spin: shows the resolver dialog and records the user's choice. */
    static class DuplicateCallBack implements CallBack {
        private DuplicateResolverResult reply = DuplicateResolverResult.NOT_CHOSEN;
        private final JabRefFrame frame;
        private final BibEntry one;
        private final BibEntry two;
        private final DuplicateResolverType dialogType;
        // Only set when the user chose to merge the two entries.
        private BibEntry merged;

        public DuplicateCallBack(JabRefFrame frame, BibEntry one, BibEntry two, DuplicateResolverType dialogType) {
            this.frame = frame;
            this.one = one;
            this.two = two;
            this.dialogType = dialogType;
        }

        public DuplicateResolverResult getSelected() {
            return reply;
        }

        public BibEntry getMergedEntry() {
            return merged;
        }

        @Override
        public void update() {
            DuplicateResolverDialog diag = new DuplicateResolverDialog(frame, one, two, dialogType);
            diag.setVisible(true);
            diag.dispose();
            reply = diag.getSelected();
            merged = diag.getMergedEntry();
        }
    }
}
| |
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Scanner;
/**
 * Command-line entry point for a Nine Men's Morris move generator. Reads
 * the player colour ("w"/"b"), a phase keyword (init/mill/move) and a 7x7
 * board from stdin, then prints the coordinates of the move chosen by the
 * minimax player.
 */
public class NineMenMoris
{
    public static void main(String[] args)
    {
        NineMenMoris solution = new NineMenMoris();
        solution.readInputAndSolve();
    }

    /** Parses stdin into a board + phase and prints the selected move. */
    private void readInputAndSolve()
    {
        Scanner in = new Scanner(System.in);
        String move;
        NSeed[][] board = new NSeed[7][7];
        NSeed player = in.next().equalsIgnoreCase("w") ? NSeed.W : NSeed.B;
        move = in.next();
        Phase moveType = null;
        if (move.equalsIgnoreCase("init"))
        {
            moveType = Phase.INIT;
        }
        else if (move.equalsIgnoreCase("mill"))
        {
            moveType = Phase.MILL;
        }
        else if (move.equalsIgnoreCase("move"))
        {
            moveType = Phase.MOVE;
        }
        for(int i = 0; i < 7; i++)
        {
            // NOTE(review): indexing row[1..7] relies on the pre-Java-8 behavior of
            // split("") producing a leading empty element; on Java 8+ the array has
            // only 7 elements and row[7] throws ArrayIndexOutOfBoundsException —
            // verify the target JVM version.
            String[] row = in.next().split("");
            for (int j = 1 ; j <= 7 ; j++)
            {
                if (row[j].equalsIgnoreCase("w"))
                {
                    board[i][j-1] = NSeed.W;
                }
                else if (row[j].equalsIgnoreCase("b"))
                {
                    board[i][j-1] = NSeed.B;
                }
                else if (row[j].equalsIgnoreCase("-") || row[j].equalsIgnoreCase("|") || row[j].equalsIgnoreCase("*"))
                {
                    board[i][j-1] = NSeed.BLOCKED;
                }
                else if (row[j].equalsIgnoreCase("O"))
                {
                    board[i][j-1] = NSeed.EMPTY;
                }
            }
        }
        in.close();
        NineMenMorisPlayer intelligentplayer = new NineMenMorisPlayer(board, player, moveType);
        // result layout: {score, fromRow, fromCol, toRow, toCol}
        int[] result = intelligentplayer.move();
        if (moveType == Phase.INIT)
        {
            System.out.println(result[3] + " " + result[4]);
        }
        else if (moveType == Phase.MILL)
        {
            System.out.println(result[3] + " " + result[4]);
        }
        else if (moveType == Phase.MOVE)
        {
            System.out.println(result[1] + " " + result[2] + " " + result[3] + " " + result[4]);
        }
    }
}
/** Contents of one board cell: White seed, Black seed, empty playable point, or non-playable (blocked) position. */
enum NSeed
{
    W,
    B,
    EMPTY,
    BLOCKED
};
/** Game phase read from stdin: INIT = placing seeds, MILL = removing an opponent seed after forming a mill, MOVE = sliding seeds. */
enum Phase
{
    INIT,
    MILL,
    MOVE
};
class NineMenMorisPlayer
{
final int[][] validIndex = new int[][] {{0,0}, {0,3}, {0,6},
{1,1}, {1,3}, {1,5},
{2,2}, {2,3}, {2,4},
{3,0}, {3,1}, {3,2}, {3,4}, {3,5}, {3,6},
{4,2}, {4,3}, {4,4},
{5,1}, {5,3}, {5,5},
{6,0}, {6,3}, {6,6}};
final int[][] validMills = new int[][]{ {0, 0, 3, 6},
{1, 1, 3, 5},
{2, 2, 3, 4},
{3, 0, 1, 2},
{3, 4, 5, 6},
{4, 2, 3, 4},
{5, 1, 3, 5},
{6, 0, 3, 6}};
final int rows = 7;
final int cols = 7;
NSeed[][] board;
NSeed myPlayer;
NSeed oppPlayer;
Phase gamePhase;
/**
 * @param board 7x7 cell matrix parsed from stdin (shared, mutated during search)
 * @param myPayer the seed colour this AI plays
 * @param phase the current game phase
 */
public NineMenMorisPlayer(NSeed[][] board, NSeed myPayer, Phase phase)
{
    this.board = board;
    this.myPlayer = myPayer;
    this.oppPlayer = (this.myPlayer == NSeed.W) ? NSeed.B : NSeed.W;
    this.gamePhase = phase;
}
/**
 * Runs the alpha-beta search from the root position at a fixed depth of 5
 * plies.
 *
 * @return {score, fromRow, fromCol, toRow, toCol} of the chosen move
 */
public int[] move()
{
    return minimax(5, myPlayer, Integer.MIN_VALUE, Integer.MAX_VALUE);
}
/**
 * Depth-limited minimax with alpha-beta pruning. Candidate moves for the
 * current {@code gamePhase} are applied to {@code board} in place, scored
 * recursively, and undone afterwards.
 *
 * NOTE(review): gamePhase is not advanced between plies, so every level of
 * the search replays the same phase — confirm this is the intended model.
 *
 * @param depth remaining plies to explore (0 evaluates the position)
 * @param player the side to move at this node
 * @param alpha best score guaranteed so far for the maximizing side (myPlayer)
 * @param beta best score guaranteed so far for the minimizing side
 * @return {score, fromRow, fromCol, toRow, toCol}; coordinates are -1 at leaves
 */
private int[] minimax(int depth, NSeed player, int alpha, int beta)
{
    List<Move> nextMoves = generateMoves(player);
    int score;
    int fromRow = -1;
    int fromCol = -1;
    int toRow = -1;
    int toCol = -1;
    if (depth == 0 || nextMoves.isEmpty())
    {
        score = evaluate();
        return new int[]{score, fromRow, fromCol, toRow, toCol};
    }
    else
    {
        // Explore tactically promising moves first to improve pruning
        // (Move.compareTo orders by descending priority).
        Collections.sort(nextMoves);
        for (Move movement: nextMoves)
        {
            int[] move = new int[]{movement.fromX, movement.fromY, movement.toX, movement.toY};
            NSeed otherPlayer = (player == myPlayer) ? oppPlayer : myPlayer;
            // Apply the move for the current phase.
            if (gamePhase == Phase.INIT)
            {
                board[move[2]][move[3]] = player;
            }
            else if (gamePhase == Phase.MILL)
            {
                board[move[2]][move[3]] = NSeed.EMPTY;
            }
            else if (gamePhase == Phase.MOVE)
            {
                board[move[0]][move[1]] = NSeed.EMPTY;
                board[move[2]][move[3]] = player;
            }
            if (player == myPlayer)
            {
                // Maximizing side.
                score = minimax(depth - 1, oppPlayer, alpha, beta)[0];
                if (score > alpha)
                {
                    alpha = score;
                    fromRow = move[0];
                    fromCol = move[1];
                    toRow = move[2];
                    toCol = move[3];
                }
            }
            else
            {
                // Minimizing side.
                score = minimax(depth - 1, myPlayer, alpha, beta)[0];
                if (score < beta)
                {
                    beta = score;
                    fromRow = move[0];
                    fromCol = move[1];
                    toRow = move[2];
                    toCol = move[3];
                }
            }
            // Undo the move.
            if (gamePhase == Phase.INIT)
            {
                board[move[2]][move[3]] = NSeed.EMPTY;
            }
            else if (gamePhase == Phase.MILL)
            {
                // NOTE(review): restores the removed seed as otherPlayer — correct only
                // if the removal target held an opponent seed, which generateMoves ensures.
                board[move[2]][move[3]] = otherPlayer;
            }
            else if (gamePhase == Phase.MOVE)
            {
                board[move[0]][move[1]] = player;
                board[move[2]][move[3]] = NSeed.EMPTY;
            }
            if (alpha >= beta)
            {
                break; // alpha-beta cutoff
            }
        }
        return new int[] {(player == myPlayer) ? alpha : beta, fromRow, fromCol, toRow, toCol};
    }
}
/**
 * Static evaluation of the board from {@code myPlayer}'s perspective.
 * Weighted sum of: being in a MILL phase (18), mill-count difference (26),
 * piece-count difference (9), two-piece-configuration difference (10) and
 * double-mill difference (8). Higher is better for {@code myPlayer}.
 */
private int evaluate()
{
    int noMillsForWPlayes = 0;
    int noMillsForBPlayers = 0;
    int mill = 0;
    int diffInMills;
    int diffInPlayers;
    int diffIn2PeiceConfig;
    int diffInDoubleMills;
    if (gamePhase == Phase.MILL)
    {
        // Bonus for evaluating inside a MILL (capture) phase.
        mill = 1;
    }
    int result = 0;
    int noWPlayers = 0;
    int noBPlayers = 0;
    // Positions are collected but only the counts are used below.
    List<int[]> wPlayers = new LinkedList<int[]>();
    List<int[]> bPlayers = new LinkedList<int[]>();
    for (int i = 0 ; i < 7 ; i++)
    {
        for (int j = 0 ; j < 7 ; j++)
        {
            if (board[i][j] == NSeed.W)
            {
                noWPlayers++;
                wPlayers.add(new int[]{i, j});
            }
            if (board[i][j] == NSeed.B)
            {
                noBPlayers++;
                bPlayers.add(new int[]{i, j});
            }
        }
    }
    Pair p = getMillsCount(NSeed.B);
    noMillsForBPlayers = p.x;
    int noDoubleMillsForBPlayer = p.y;
    p = getMillsCount(NSeed.W);
    noMillsForWPlayes = p.x;
    int noDoubleMillsForWPlayer = p.y;
    int no2PConfigW = get2PieceCount(NSeed.W);
    int no2PConfigB = get2PieceCount(NSeed.B);
    if (myPlayer == NSeed.B)
    {
        diffInMills = noMillsForBPlayers - noMillsForWPlayes;
        diffInPlayers = noBPlayers - noWPlayers;
        diffIn2PeiceConfig = no2PConfigB - no2PConfigW;
        diffInDoubleMills = noDoubleMillsForBPlayer - noDoubleMillsForWPlayer;
        result = 18* mill + 26* diffInMills + 9 * diffInPlayers + 10*diffIn2PeiceConfig + 8*diffInDoubleMills;
    }
    else
    {
        diffInMills = noMillsForWPlayes - noMillsForBPlayers;
        diffInPlayers = noWPlayers - noBPlayers;
        diffIn2PeiceConfig = no2PConfigW - no2PConfigB;
        diffInDoubleMills = noDoubleMillsForWPlayer - noDoubleMillsForBPlayer;
        result = 18* mill + 26* diffInMills + 9 * diffInPlayers + 10*diffIn2PeiceConfig + 8*diffInDoubleMills;
    }
    return result;
}
/**
 * Returns whether the given player has effectively won: the opposing side
 * has at most 2 seeds left on the board (too few to ever form a mill).
 *
 * @param player the player whose victory is being tested
 * @return true if the opponent has two or fewer seeds remaining
 */
public boolean hasWon(NSeed player)
{
    // The two branches of the original differed only in which colour was
    // counted, so resolve the opponent once and use a single loop.
    NSeed opponent = (player == NSeed.B) ? NSeed.W : NSeed.B;
    int count = 0;
    for (int i = 0 ; i < 7 ; i++)
    {
        for (int j = 0 ; j < 7 ; j++)
        {
            if (board[i][j] == opponent)
            {
                count++;
            }
        }
    }
    return count <= 2;
}
/**
 * Generates all candidate moves for {@code player} in the current phase:
 * INIT — place a seed on any empty valid point; MILL — remove any opponent
 * seed (NOTE(review): seeds inside completed mills are not protected here —
 * confirm against the rules variant in use); MOVE — slide to adjacent
 * points, or "fly" to any empty point when only 3 or fewer seeds remain.
 */
private List<Move> generateMoves(NSeed player)
{
    List<Move> moves = new LinkedList<Move>();
    if (gamePhase == Phase.INIT) //just place stuffs
    {
        for (int i = 0 ; i < 24 ; i++)
        {
            int[] index = validIndex[i];
            if (board[index[0]][index[1]] == NSeed.EMPTY)
            {
                moves.add(new Move(-1, -1, index[0], index[1], validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase));
            }
        }
    }
    else if (gamePhase == Phase.MILL)
    {
        NSeed otherPlayer = (player == myPlayer) ? oppPlayer : myPlayer;
        for (int i = 0 ; i < 24 ; i++)
        {
            int[] index = validIndex[i];
            if (board[index[0]][index[1]] == otherPlayer)
            {
                moves.add(new Move(-1, -1, index[0], index[1], validMills, board, player, otherPlayer, gamePhase));
            }
        }
    }
    else if (gamePhase == Phase.MOVE)
    {
        // Collect the player's current seed positions.
        List<int[]> myPositions = new LinkedList<int[]>();
        for (int i = 0 ; i < 24 ; i++)
        {
            int[] index = validIndex[i];
            if (board[index[0]][index[1]] == player)
            {
                myPositions.add(index);
            }
        }
        if (myPositions.size() > 3)
        {
            // Normal sliding: only adjacent empty points are reachable.
            moves.addAll(generateAdjacentMoves(myPositions, player));
        }
        else
        {
            // "Flying" phase: with 3 seeds any empty valid point is reachable.
            for (int i = 0 ; i < 24 ; i++)
            {
                int[] index = validIndex[i];
                if (board[index[0]][index[1]] == NSeed.EMPTY)
                {
                    for (int[] position: myPositions)
                    {
                        moves.add(new Move(position[0], position[1], index[0], index[1], validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase));
                    }
                }
            }
        }
    }
    return moves;
}
/**
 * Generates slide moves from each given position to adjacent empty points
 * on the 7x7 board encoding. The step size to the neighboring ring is the
 * distance to the middle row/column (3 - row, 3 - col); points on the
 * middle row or column additionally get unit-step neighbors along that
 * line. Targets are only accepted when EMPTY (so BLOCKED points filter out).
 */
private List<Move> generateAdjacentMoves(List<int[]> positions, NSeed player)
{
    List<Move> moves = new LinkedList<Move>();
    for (int[] position: positions)
    {
        int row = position[0];
        int col = position[1];
        // Offsets toward/away from the middle cross of the board.
        int hDistance = 3 - row;
        int vDistance = 3 - col;
        //up
        int index = row -vDistance;
        if (index >= 0 && index <= 6 && board[index][col] == NSeed.EMPTY)
        {
            Move m = new Move(row, col, index, col, validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase);
            moves.add(m);
        }
        //down
        index = row + vDistance;
        if (index >= 0 && index <= 6 && board[index][col] == NSeed.EMPTY)
        {
            Move m = new Move(row, col, index, col, validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase);
            moves.add(m);
        }
        //left
        index = col - hDistance;
        if (index >= 0 && index <= 6 && board[row][index] == NSeed.EMPTY)
        {
            Move m = new Move(row, col, row, index, validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase);
            moves.add(m);
        }
        //right
        index = col + hDistance;
        if (index >= 0 && index <= 6 && board[row][index] == NSeed.EMPTY)
        {
            Move m = new Move(row, col, row, index, validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase);
            moves.add(m);
        }
        if (row == 3)
        {
            // Middle row: unit steps along the row.
            index = col + 1;
            if (index >= 0 && index <= 6 && board[row][index] == NSeed.EMPTY)
            {
                Move m = new Move(row, col, row, index, validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase);
                moves.add(m);
            }
            index = col - 1;
            if (index >= 0 && index <= 6 && board[row][index] == NSeed.EMPTY)
            {
                Move m = new Move(row, col, row, index, validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase);
                moves.add(m);
            }
        }
        else if (col == 3)
        {
            // Middle column: unit steps along the column.
            index = row - 1;
            if (index >= 0 && index <= 6 && board[index][col] == NSeed.EMPTY)
            {
                Move m = new Move(row, col, index, col, validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase);
                moves.add(m);
            }
            //down
            index = row + 1;
            if (index >= 0 && index <= 6 && board[index][col] == NSeed.EMPTY)
            {
                Move m = new Move(row, col, index, col, validMills, board, player, player == myPlayer ? oppPlayer : myPlayer, gamePhase);
                moves.add(m);
            }
        }
    }
    return moves;
}
/**
 * Counts completed mills for {@code player}. Each validMills entry
 * {seed, a, b, c} encodes two candidate lines: one indexed as
 * board[a..c][seed] and one as board[seed][a..c]. If both lines of the same
 * entry are complete, it is counted as a "double mill".
 *
 * @return Pair with x = total mill count, y = double-mill count
 */
private Pair getMillsCount(NSeed player)
{
    Pair p = new Pair();
    int countMill = 0;
    int countDoubleMill = 0;
    for (int i = 0 ; i < 8; i++)
    {
        int count = 0;
        int[] mill = validMills[i];
        int seed = mill[0];
        if (board[mill[1]][seed] == player && board[mill[2]][seed] == player && board[mill[3]][seed] == player)
        {
            count++;
        }
        if (board[seed][mill[1]] == player && board[seed][mill[2]] == player && board[seed][mill[3]] == player)
        {
            count++;
        }
        countMill += count;
        if (count == 2)
        {
            countDoubleMill += 1;
        }
    }
    p.x = countMill;
    p.y = countDoubleMill;
    return p;
}
/**
 * Counts "2-piece configurations" for the given player: mill lines on which
 * the player occupies exactly two points while the third is empty, i.e.
 * lines one move away from completing a mill.
 *
 * @param player the seed colour to evaluate
 * @return number of almost-complete mill lines for that player
 */
private int get2PieceCount(NSeed player)
{
    int count = 0;
    for (int i = 0 ; i < 8; i++)
    {
        int[] mill = validMills[i];
        int seed = mill[0];
        // Each validMills entry describes two candidate lines, indexed as
        // board[a..c][seed] and board[seed][a..c] (same convention as getMillsCount).
        if (isTwoPieceLine(player, board[mill[1]][seed], board[mill[2]][seed], board[mill[3]][seed]))
        {
            count++;
        }
        if (isTwoPieceLine(player, board[seed][mill[1]], board[seed][mill[2]], board[seed][mill[3]]))
        {
            count++;
        }
    }
    return count;
}

/** True when exactly two of the three line points belong to the player and one is empty. */
private boolean isTwoPieceLine(NSeed player, NSeed a, NSeed b, NSeed c)
{
    int countFilled = 0;
    int countEmpty = 0;
    for (NSeed cell : new NSeed[] {a, b, c})
    {
        if (cell == player)
        {
            countFilled++;
        }
        else if (cell == NSeed.EMPTY)
        {
            countEmpty++;
        }
    }
    return countFilled == 2 && countEmpty == 1;
}
}
/** Simple mutable int pair; used by getMillsCount to return (mill count, double-mill count). */
class Pair
{
    // x = primary count (mills), y = secondary count (double mills)
    int x;
    int y;
}
class Move implements Comparable<Move>
{
int fromX;
int fromY;
int toX;
int toY;
int priority = 0;
Phase gamePhase;
/**
 * Builds a candidate move and pre-computes a search-ordering priority by
 * applying the move to a scratch copy of the board:
 * 200 — completes two mills at once, 100 — completes one mill,
 * 50 — occupies a line where the opponent already has two seeds (block),
 * 0 — no recognized tactical value.
 *
 * @param fromX source row, or -1 for INIT/MILL phase moves
 * @param fromY source column, or -1 for INIT/MILL phase moves
 * @param toX destination row (removal target row in MILL phase)
 * @param toY destination column (removal target column in MILL phase)
 * @param validMills mill line table (see NineMenMorisPlayer.validMills)
 * @param board current board; copied, never mutated here
 * @param player the side making the move
 * @param otherPlayer the opposing side
 * @param p the current game phase
 */
public Move(int fromX, int fromY, int toX, int toY, int[][] validMills, NSeed[][] board, NSeed player, NSeed otherPlayer, Phase p)
{
    this.fromX = fromX;
    this.fromY = fromY;
    this.toX = toX;
    this.toY = toY;
    gamePhase = p;
    // Work on a row-wise deep copy so the real board is untouched.
    NSeed [][] myInt = new NSeed[board.length][];
    for(int i = 0; i < board.length; i++)
        myInt[i] = board[i].clone();
    // NOTE(review): in MILL phase this also places our own seed on the removal
    // target before scoring — confirm that is the intended heuristic.
    myInt[toX][toY] = player;
    if (fromX > -1)
    {
        myInt[fromX][fromY] = NSeed.EMPTY;
    }
    for (int i = 0 ; i < 8; i++)
    {
        int[] mill = validMills[i];
        int seed = mill[0];
        // Only mill lines passing through the destination can be affected.
        if (seed == toX || seed == toY)
        {
            // Vertical line board[a..c][seed] completed?
            if (myInt[mill[1]][seed] == player && myInt[mill[2]][seed] == player && myInt[mill[3]][seed] == player)
            {
                if (seed == toX)
                {
                    if (mill[1] == toY || mill[2] == toY || mill[3] == toY)
                    {
                        priority = 100;
                    }
                }
                else if (seed == toY)
                {
                    if (mill[1] == toX || mill[2] == toX || mill[3] == toX)
                    {
                        priority = 100;
                    }
                }
            }
            // Horizontal line board[seed][a..c] completed? Two mills at once -> 200.
            if (myInt[seed][mill[1]] == player && myInt[seed][mill[2]] == player && myInt[seed][mill[3]] == player)
            {
                if (seed == toX)
                {
                    if (mill[1] == toY || mill[2] == toY || mill[3] == toY)
                    {
                        priority = priority == 100 ? 200 : 100;
                    }
                }
                else if (seed == toY)
                {
                    if (mill[1] == toX || mill[2] == toX || mill[3] == toX)
                    {
                        priority = priority == 100 ? 200 : 100;
                    }
                }
            }
            // No mill formed: check whether this move blocks an opponent line
            // that already has two seeds on the relevant board half.
            if (priority <= 0)
            {
                int count = 0;
                if (seed == toX)
                {
                    count = 0;
                    int maxVal = Integer.MAX_VALUE;
                    int minVal = Integer.MIN_VALUE;
                    if (seed == 3)
                    {
                        // The middle line spans two 3-point segments; restrict the
                        // check to the segment containing the destination.
                        if (toX <= 2)
                        {
                            maxVal = 2;
                            minVal = 0;
                        }
                        if (toX > 2)
                        {
                            maxVal = 6;
                            minVal = 4;
                        }
                    }
                    if (myInt[seed][mill[1]] == otherPlayer && minVal <= mill[1] && mill[1] <= maxVal) count++;
                    if (myInt[seed][mill[2]] == otherPlayer && minVal <= mill[2] && mill[2] <= maxVal) count++;
                    if (myInt[seed][mill[3]] == otherPlayer && minVal <= mill[3] && mill[3] <= maxVal) count++;
                    if (count >= 2)
                    {
                        priority = 50;
                    }
                }
                if (seed == toY)
                {
                    count = 0;
                    int maxVal = Integer.MAX_VALUE;
                    int minVal = Integer.MIN_VALUE;
                    if (seed == 3)
                    {
                        // NOTE(review): this segment check uses toX in both branches;
                        // for the seed == toY case toY might be intended — verify.
                        if (toX <= 2)
                        {
                            maxVal = 2;
                            minVal = 0;
                        }
                        if (toX > 2)
                        {
                            maxVal = 6;
                            minVal = 4;
                        }
                    }
                    if (myInt[mill[1]][seed] == otherPlayer && minVal <= mill[1] && mill[1] <= maxVal) count++;
                    if (myInt[mill[2]][seed] == otherPlayer && minVal <= mill[2] && mill[2] <= maxVal) count++;
                    if (myInt[mill[3]][seed] == otherPlayer && minVal <= mill[3] && mill[3] <= maxVal) count++;
                    if (count >= 2)
                    {
                        priority = 50;
                    }
                }
            }
        }
    }
}
/**
 * Orders moves by descending priority so mill-forming and blocking moves
 * are explored first during alpha-beta search.
 */
@Override
public int compareTo(Move o)
{
    // Integer.compare avoids the verbose three-branch chain and any
    // subtraction-overflow pitfalls; arguments reversed for descending order.
    return Integer.compare(o.priority, this.priority);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler;
import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.TestUtils.toSchedulerKey;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerImpl;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.server.scheduler.SchedulerRequestKey;
import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator;
import org.junit.After;
import org.junit.Test;
public class TestSchedulerApplicationAttempt {
private static final NodeId nodeId = NodeId.newInstance("somehost", 5);
private Configuration conf = new Configuration();
/** Resets global metrics state so individual tests do not leak into each other. */
@After
public void tearDown() {
    QueueMetrics.clearQueueMetrics();
    DefaultMetricsSystem.shutdown();
}
/**
 * Verifies that moving an application attempt between sibling queues
 * transfers all of its metrics (active/running apps, allocated, reserved
 * and pending resources) from the old queue to the new one while leaving
 * the shared parent's totals unchanged.
 */
@Test
public void testMove() {
    final String user = "user1";
    Queue parentQueue = createQueue("parent", null);
    Queue oldQueue = createQueue("old", parentQueue);
    Queue newQueue = createQueue("new", parentQueue);
    QueueMetrics parentMetrics = parentQueue.getMetrics();
    QueueMetrics oldMetrics = oldQueue.getMetrics();
    QueueMetrics newMetrics = newQueue.getMetrics();
    ApplicationAttemptId appAttId = createAppAttemptId(0, 0);
    RMContext rmContext = mock(RMContext.class);
    when(rmContext.getEpoch()).thenReturn(3L);
    SchedulerApplicationAttempt app = new SchedulerApplicationAttempt(appAttId,
        user, oldQueue, oldQueue.getAbstractUsersManager(), rmContext);
    oldMetrics.submitApp(user);
    // confirm that containerId is calculated based on epoch.
    assertEquals(0x30000000001L, app.getNewContainerId());
    // Resource request
    Resource requestedResource = Resource.newInstance(1536, 2);
    Priority requestedPriority = Priority.newInstance(2);
    ResourceRequest request = ResourceRequest.newInstance(requestedPriority,
        ResourceRequest.ANY, requestedResource, 3);
    app.updateResourceRequests(Arrays.asList(request));
    // Allocated container
    RMContainer container1 = createRMContainer(appAttId, 1, requestedResource);
    app.liveContainers.put(container1.getContainerId(), container1);
    SchedulerNode node = createNode();
    app.appSchedulingInfo.allocate(NodeType.OFF_SWITCH, node,
        toSchedulerKey(requestedPriority), container1.getContainer());
    // Reserved container
    Priority prio1 = Priority.newInstance(1);
    Resource reservedResource = Resource.newInstance(2048, 3);
    RMContainer container2 = createReservedRMContainer(appAttId, 1, reservedResource,
        node.getNodeID(), prio1);
    Map<NodeId, RMContainer> reservations = new HashMap<NodeId, RMContainer>();
    reservations.put(node.getNodeID(), container2);
    app.reservedContainers.put(toSchedulerKey(prio1), reservations);
    oldMetrics.reserveResource(container2.getNodeLabelExpression(),
        user, reservedResource);
    // Before the move: old queue and parent carry the app's metrics.
    checkQueueMetrics(oldMetrics, 1, 1, 1536, 2, 2048, 3, 3072, 4);
    checkQueueMetrics(newMetrics, 0, 0, 0, 0, 0, 0, 0, 0);
    checkQueueMetrics(parentMetrics, 1, 1, 1536, 2, 2048, 3, 3072, 4);
    app.move(newQueue);
    // After the move: metrics migrate to the new queue; parent is unchanged.
    checkQueueMetrics(oldMetrics, 0, 0, 0, 0, 0, 0, 0, 0);
    checkQueueMetrics(newMetrics, 1, 1, 1536, 2, 2048, 3, 3072, 4);
    checkQueueMetrics(parentMetrics, 1, 1, 1536, 2, 2048, 3, 3072, 4);
}
/** Asserts all tracked queue metrics in one call: app counts plus allocated, reserved and pending resources. */
private void checkQueueMetrics(QueueMetrics metrics, int activeApps,
    int runningApps, int allocMb, int allocVcores, int reservedMb,
    int reservedVcores, int pendingMb, int pendingVcores) {
    assertEquals(activeApps, metrics.getActiveApps());
    assertEquals(runningApps, metrics.getAppsRunning());
    assertEquals(allocMb, metrics.getAllocatedMB());
    assertEquals(allocVcores, metrics.getAllocatedVirtualCores());
    assertEquals(reservedMb, metrics.getReservedMB());
    assertEquals(reservedVcores, metrics.getReservedVirtualCores());
    assertEquals(pendingMb, metrics.getPendingMB());
    assertEquals(pendingVcores, metrics.getPendingVirtualCores());
}
/** Builds a mock scheduler node pinned to the shared test {@code nodeId}. */
private SchedulerNode createNode() {
    SchedulerNode node = mock(SchedulerNode.class);
    when(node.getNodeName()).thenReturn("somehost");
    when(node.getRackName()).thenReturn("somerack");
    when(node.getNodeID()).thenReturn(nodeId);
    return node;
}
/** Builds a mock container that also reports a reservation (resource, scheduler key, node). */
private RMContainer createReservedRMContainer(ApplicationAttemptId appAttId,
    int id, Resource resource, NodeId nodeId, Priority reservedPriority) {
    RMContainer container = createRMContainer(appAttId, id, resource);
    when(container.getReservedResource()).thenReturn(resource);
    when(container.getReservedSchedulerKey())
        .thenReturn(toSchedulerKey(reservedPriority));
    when(container.getReservedNode()).thenReturn(nodeId);
    return container;
}
/**
 * Builds a mock RMContainer wrapping a mock Container of the given resource,
 * placed on this test's shared {@code nodeId}.
 */
private RMContainer createRMContainer(ApplicationAttemptId appAttId, int id,
    Resource resource) {
  ContainerId cId = ContainerId.newContainerId(appAttId, id);
  Container mockContainer = mock(Container.class);
  when(mockContainer.getResource()).thenReturn(resource);
  when(mockContainer.getNodeId()).thenReturn(nodeId);
  RMContainer mockRmContainer = mock(RMContainerImpl.class);
  when(mockRmContainer.getContainer()).thenReturn(mockContainer);
  when(mockRmContainer.getContainerId()).thenReturn(cId);
  return mockRmContainer;
}
/** Creates a mock queue at full (1.0) capacity; see the three-argument overload. */
private Queue createQueue(String name, Queue parent) {
  return createQueue(name, parent, 1.0f);
}
/**
 * Builds a mock Queue whose metrics, active-users manager and QueueInfo
 * (carrying the given capacity) are all wired up.
 */
private Queue createQueue(String name, Queue parent, float capacity) {
  QueueMetrics queueMetrics = QueueMetrics.forQueue(name, parent, false, conf);
  QueueInfo info = QueueInfo.newInstance(name, capacity, 1.0f, 0, null,
      null, QueueState.RUNNING, null, "", null, false);
  ActiveUsersManager usersManager = new ActiveUsersManager(queueMetrics);
  Queue mockQueue = mock(Queue.class);
  when(mockQueue.getMetrics()).thenReturn(queueMetrics);
  when(mockQueue.getAbstractUsersManager()).thenReturn(usersManager);
  when(mockQueue.getQueueInfo(false, false)).thenReturn(info);
  return mockQueue;
}
/** Builds an ApplicationAttemptId for the given app id (cluster timestamp 0). */
private ApplicationAttemptId createAppAttemptId(int appId, int attemptId) {
  return ApplicationAttemptId.newInstance(
      ApplicationId.newInstance(0, appId), attemptId);
}
@Test
public void testAppPercentages() throws Exception {
  // Cluster: 10 GB / 10 vcores. DefaultResourceCalculator compares on memory
  // only, so all percentages below are memory-based.
  FifoScheduler scheduler = mock(FifoScheduler.class);
  when(scheduler.getClusterResource())
      .thenReturn(Resource.newInstance(10 * 1024, 10));
  when(scheduler.getResourceCalculator())
      .thenReturn(new DefaultResourceCalculator());
  ApplicationAttemptId appAttId = createAppAttemptId(0, 0);
  RMContext rmContext = mock(RMContext.class);
  when(rmContext.getEpoch()).thenReturn(3L);
  when(rmContext.getScheduler()).thenReturn(scheduler);
  final String user = "user1";
  // Case 1: queue at full capacity -- queue and cluster usage are both
  // 1536 / 10240 = 15%.
  Queue queue = createQueue("test", null);
  SchedulerApplicationAttempt app =
      new SchedulerApplicationAttempt(appAttId, user, queue,
          queue.getAbstractUsersManager(), rmContext);
  // Resource request
  Resource requestedResource = Resource.newInstance(1536, 2);
  app.attemptResourceUsage.incUsed(requestedResource);
  assertEquals(15.0f, app.getResourceUsageReport().getQueueUsagePercentage(),
      0.01f);
  assertEquals(15.0f,
      app.getResourceUsageReport().getClusterUsagePercentage(), 0.01f);
  // Case 2: queue at 50% capacity -- queue usage doubles to 30% while
  // cluster usage stays at 15%.
  queue = createQueue("test2", null, 0.5f);
  app = new SchedulerApplicationAttempt(appAttId, user, queue,
      queue.getAbstractUsersManager(), rmContext);
  app.attemptResourceUsage.incUsed(requestedResource);
  assertEquals(30.0f, app.getResourceUsageReport().getQueueUsagePercentage(),
      0.01f);
  assertEquals(15.0f,
      app.getResourceUsageReport().getClusterUsagePercentage(), 0.01f);
  // Three more allocations (4 x 1536 = 6144 MB total): 60% of the cluster,
  // which is 120% of the half-capacity queue.
  app.attemptResourceUsage.incUsed(requestedResource);
  app.attemptResourceUsage.incUsed(requestedResource);
  app.attemptResourceUsage.incUsed(requestedResource);
  assertEquals(120.0f, app.getResourceUsageReport().getQueueUsagePercentage(),
      0.01f);
  assertEquals(60.0f,
      app.getResourceUsageReport().getClusterUsagePercentage(), 0.01f);
  // Case 3: zero-capacity queue -- queue usage is reported as 0;
  // cluster usage is unaffected (15%).
  queue = createQueue("test3", null, 0.0f);
  app = new SchedulerApplicationAttempt(appAttId, user, queue,
      queue.getAbstractUsersManager(), rmContext);
  // Resource request
  app.attemptResourceUsage.incUsed(requestedResource);
  assertEquals(0.0f, app.getResourceUsageReport().getQueueUsagePercentage(),
      0.01f);
  assertEquals(15.0f,
      app.getResourceUsageReport().getClusterUsagePercentage(), 0.01f);
}
@Test
public void testAppPercentagesOnswitch() throws Exception {
  // An empty cluster (0 MB, 0 vcores): both usage percentages are expected
  // to be reported as 0.
  FifoScheduler fifo = mock(FifoScheduler.class);
  when(fifo.getClusterResource()).thenReturn(Resource.newInstance(0, 0));
  when(fifo.getResourceCalculator())
      .thenReturn(new DefaultResourceCalculator());
  RMContext context = mock(RMContext.class);
  when(context.getEpoch()).thenReturn(3L);
  when(context.getScheduler()).thenReturn(fifo);
  ApplicationAttemptId attemptId = createAppAttemptId(0, 0);
  Queue queue = createQueue("test", null);
  SchedulerApplicationAttempt attempt = new SchedulerApplicationAttempt(
      attemptId, "user1", queue, queue.getAbstractUsersManager(), context);
  // Use some resource; the zero-sized cluster keeps both percentages at 0.
  attempt.attemptResourceUsage.incUsed(Resource.newInstance(1536, 2));
  assertEquals(0.0f,
      attempt.getResourceUsageReport().getQueueUsagePercentage(), 0.0f);
  assertEquals(0.0f,
      attempt.getResourceUsageReport().getClusterUsagePercentage(), 0.0f);
}
@Test
public void testSchedulingOpportunityOverflow() throws Exception {
  ApplicationAttemptId appAttemptId = createAppAttemptId(0, 0);
  Queue queue = createQueue("test", null);
  RMContext context = mock(RMContext.class);
  when(context.getEpoch()).thenReturn(3L);
  SchedulerApplicationAttempt attempt = new SchedulerApplicationAttempt(
      appAttemptId, "user", queue, queue.getAbstractUsersManager(), context);
  SchedulerRequestKey key = toSchedulerKey(Priority.newInstance(1));
  // A fresh attempt starts at zero opportunities; each add bumps it by one.
  assertEquals(0, attempt.getSchedulingOpportunities(key));
  attempt.addSchedulingOpportunity(key);
  assertEquals(1, attempt.getSchedulingOpportunities(key));
  // The counter saturates at Integer.MAX_VALUE instead of overflowing.
  attempt.setSchedulingOpportunities(key, Integer.MAX_VALUE - 1);
  assertEquals(Integer.MAX_VALUE - 1,
      attempt.getSchedulingOpportunities(key));
  attempt.addSchedulingOpportunity(key);
  assertEquals(Integer.MAX_VALUE,
      attempt.getSchedulingOpportunities(key));
  attempt.addSchedulingOpportunity(key);
  assertEquals(Integer.MAX_VALUE,
      attempt.getSchedulingOpportunities(key));
}
}
| |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.drm.mobile1;
import java.io.*;
import java.util.*;
/**
 * This class provides interfaces to access the DRM rights manager.
 *
 * <p>It is a process-wide singleton; all operations are delegated to the
 * native OMA DRM v1 agent through the "drm1_jni" library.
 */
public class DrmRightsManager {
    /**
     * The "application/vnd.oma.drm.rights+xml" mime type.
     */
    public static final String DRM_MIMETYPE_RIGHTS_XML_STRING = "application/vnd.oma.drm.rights+xml";

    /**
     * The "application/vnd.oma.drm.rights+wbxml" mime type.
     */
    public static final String DRM_MIMETYPE_RIGHTS_WBXML_STRING = "application/vnd.oma.drm.rights+wbxml";

    /**
     * The id of "application/vnd.oma.drm.rights+xml" mime type.
     */
    private static final int DRM_MIMETYPE_RIGHTS_XML = 3;

    /**
     * The id of "application/vnd.oma.drm.rights+wbxml" mime type.
     */
    private static final int DRM_MIMETYPE_RIGHTS_WBXML = 4;

    /**
     * The id of "application/vnd.oma.drm.message" mime type.
     */
    private static final int DRM_MIMETYPE_MESSAGE = 1;

    /**
     * Successful operation (native layer return code).
     */
    private static final int JNI_DRM_SUCCESS = 0;

    /**
     * General failure (native layer return code).
     */
    private static final int JNI_DRM_FAILURE = -1;

    /**
     * The single instance of the rights manager.
     */
    private static DrmRightsManager singleton = null;

    /**
     * Construct a DrmRightsManager. Use {@link #getInstance()} instead of
     * constructing directly.
     */
    protected DrmRightsManager() {
    }

    /**
     * Get the DrmRightsManager instance.
     *
     * @return the instance of DrmRightsManager.
     */
    public static synchronized DrmRightsManager getInstance() {
        if (singleton == null) {
            singleton = new DrmRightsManager();
        }
        return singleton;
    }

    /**
     * Install one DRM rights object and return one instance of DrmRights.
     *
     * @param rightsData raw rights data.
     * @param len the number of bytes to consume from {@code rightsData}.
     * @param mimeTypeStr the mime type of the rights object; must be one of
     *            {@link #DRM_MIMETYPE_RIGHTS_XML_STRING},
     *            {@link #DRM_MIMETYPE_RIGHTS_WBXML_STRING} or
     *            DrmRawContent.DRM_MIMETYPE_MESSAGE_STRING.
     *
     * @return the instance of the installed DrmRights, or null if
     *         {@code len <= 0}.
     *
     * @throws IllegalArgumentException if the mime type is unsupported.
     * @throws DrmException if the native installation fails.
     */
    public synchronized DrmRights installRights(InputStream rightsData, int len, String mimeTypeStr) throws DrmException, IOException {
        int mimeType;
        if (DRM_MIMETYPE_RIGHTS_XML_STRING.equals(mimeTypeStr)) {
            mimeType = DRM_MIMETYPE_RIGHTS_XML;
        } else if (DRM_MIMETYPE_RIGHTS_WBXML_STRING.equals(mimeTypeStr)) {
            mimeType = DRM_MIMETYPE_RIGHTS_WBXML;
        } else if (DrmRawContent.DRM_MIMETYPE_MESSAGE_STRING.equals(mimeTypeStr)) {
            mimeType = DRM_MIMETYPE_MESSAGE;
        } else {
            throw new IllegalArgumentException("mimeType must be DRM_MIMETYPE_RIGHTS_XML or DRM_MIMETYPE_RIGHTS_WBXML or DRM_MIMETYPE_MESSAGE");
        }

        if (len <= 0) {
            return null;
        }

        DrmRights rights = new DrmRights();

        /* call native method to install this rights object. */
        int res = nativeInstallDrmRights(rightsData, len, mimeType, rights);
        if (JNI_DRM_FAILURE == res) {
            throw new DrmException("nativeInstallDrmRights() returned JNI_DRM_FAILURE");
        }
        return rights;
    }

    /**
     * Query DRM rights of specified DRM raw content.
     *
     * @param content raw content object.
     *
     * @return the instance of DrmRights, or null if there is no rights.
     */
    public synchronized DrmRights queryRights(DrmRawContent content) {
        DrmRights rights = new DrmRights();

        /* call native method to query the rights */
        int res = nativeQueryRights(content, rights);
        if (JNI_DRM_FAILURE == res) {
            return null;
        }
        return rights;
    }

    /**
     * Get the list of all DRM rights saved in the local client.
     *
     * @return the list of all the rights objects (possibly empty), or null
     *         if the native agent reports a failure.
     */
    public synchronized List<DrmRights> getRightsList() {
        List<DrmRights> rightsList = new ArrayList<DrmRights>();

        /* call native method to get how many rights object in current agent */
        int num = nativeGetNumOfRights();
        if (JNI_DRM_FAILURE == num) {
            return null;
        }

        if (num > 0) {
            DrmRights[] rightsArray = new DrmRights[num];
            for (int i = 0; i < num; i++) {
                rightsArray[i] = new DrmRights();
            }

            /* fill in the rights information; the native call may report
             * fewer entries than the count obtained above */
            num = nativeGetRightsList(rightsArray, num);
            if (JNI_DRM_FAILURE == num) {
                return null;
            }

            /* add all rights information to the result list */
            for (int i = 0; i < num; i++) {
                rightsList.add(rightsArray[i]);
            }
        }
        return rightsList;
    }

    /**
     * Delete the specified DRM rights object.
     *
     * <p>Failures reported by the native layer are silently ignored, matching
     * the historical behaviour of this void API (it has no error channel).
     *
     * @param rights the specified rights object to be deleted.
     */
    public synchronized void deleteRights(DrmRights rights) {
        /* call native method to delete the specified rights object;
         * the result is deliberately ignored (see javadoc). */
        nativeDeleteRights(rights);
    }

    /**
     * native method: install rights object to local client.
     *
     * @param data input DRM rights object data to be installed.
     * @param len the length of the data.
     * @param mimeType the mime type of this DRM rights object. the value of this field includes:
     * #DRM_MIMETYPE_RIGHTS_XML
     * #DRM_MIMETYPE_RIGHTS_WBXML
     * @param rights the instance of DRMRights to be filled.
     *
     * @return #JNI_DRM_SUCCESS if succeed.
     * #JNI_DRM_FAILURE if fail.
     */
    private native int nativeInstallDrmRights(InputStream data, int len, int mimeType, DrmRights rights);

    /**
     * native method: query the given DRM content's rights object.
     *
     * @param content the given DRM content.
     * @param rights the instance of rights to set if have.
     *
     * @return #JNI_DRM_SUCCESS if succeed.
     * #JNI_DRM_FAILURE if fail.
     */
    private native int nativeQueryRights(DrmRawContent content, DrmRights rights);

    /**
     * native method: get how many rights object in current DRM agent.
     *
     * @return the number of the rights object.
     * #JNI_DRM_FAILURE if fail.
     */
    private native int nativeGetNumOfRights();

    /**
     * native method: get all the rights object in current local agent.
     *
     * @param rights the array instance of rights object.
     * @param numRights how many rights can be saved.
     *
     * @return the number of the rights object has been gotten.
     * #JNI_DRM_FAILURE if fail.
     */
    private native int nativeGetRightsList(DrmRights[] rights, int numRights);

    /**
     * native method: delete a specified rights object.
     *
     * @param rights the specified rights object to be deleted.
     *
     * @return #JNI_DRM_SUCCESS if succeed.
     * #JNI_DRM_FAILURE if fail.
     */
    private native int nativeDeleteRights(DrmRights rights);

    /**
     * Load the shared library to link the native methods.
     */
    static {
        try {
            System.loadLibrary("drm1_jni");
        } catch (UnsatisfiedLinkError ule) {
            System.err.println("WARNING: Could not load libdrm1_jni.so");
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.dag.history.events;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.tez.dag.api.DagTypeConverters;
import org.apache.tez.dag.api.records.DAGProtos;
import org.apache.tez.dag.api.records.DAGProtos.DAGPlan;
import org.apache.tez.dag.history.HistoryEvent;
import org.apache.tez.dag.history.HistoryEventType;
import org.apache.tez.dag.history.SummaryEvent;
import org.apache.tez.dag.records.TezDAGID;
import org.apache.tez.dag.recovery.records.RecoveryProtos.DAGSubmittedProto;
import org.apache.tez.dag.recovery.records.RecoveryProtos.SummaryEventProto;
import org.apache.tez.dag.utils.ProtoUtils;
/**
 * History event generated when a DAG is submitted to the AM.
 *
 * <p>Acts both as a recovery event (written to the recovery stream
 * immediately, see {@link #writeToRecoveryImmediately()}) and as a summary
 * event whose payload is the UTF-8 encoded DAG name.
 */
public class DAGSubmittedEvent implements HistoryEvent, SummaryEvent {

  private static final Logger LOG = LoggerFactory.getLogger(DAGSubmittedEvent.class);

  private TezDAGID dagID;
  private String dagName;
  private long submitTime;
  private DAGProtos.DAGPlan dagPlan;
  private ApplicationAttemptId applicationAttemptId;
  private String user;
  private Map<String, LocalResource> cumulativeAdditionalLocalResources;
  private boolean historyLoggingEnabled = true;
  private Configuration conf;
  private String containerLogs;
  private String queueName;

  /** Required for reconstructing the event via {@link #fromProtoStream}. */
  public DAGSubmittedEvent() {
  }

  public DAGSubmittedEvent(TezDAGID dagID, long submitTime,
      DAGProtos.DAGPlan dagPlan, ApplicationAttemptId applicationAttemptId,
      Map<String, LocalResource> cumulativeAdditionalLocalResources,
      String user, Configuration conf, String containerLogs, String queueName) {
    this.dagID = dagID;
    this.dagName = dagPlan.getName();
    this.submitTime = submitTime;
    this.dagPlan = dagPlan;
    this.applicationAttemptId = applicationAttemptId;
    this.cumulativeAdditionalLocalResources = cumulativeAdditionalLocalResources;
    this.user = user;
    this.conf = conf;
    this.containerLogs = containerLogs;
    this.queueName = queueName;
  }

  @Override
  public HistoryEventType getEventType() {
    return HistoryEventType.DAG_SUBMITTED;
  }

  @Override
  public boolean isRecoveryEvent() {
    return true;
  }

  @Override
  public boolean isHistoryEvent() {
    return true;
  }

  /**
   * Serializes this event to its protobuf form. Optional fields (queue name,
   * cumulative additional AM resources) are set only when present.
   */
  public DAGSubmittedProto toProto() {
    DAGSubmittedProto.Builder builder = DAGSubmittedProto.newBuilder()
        .setDagId(dagID.toString())
        .setApplicationAttemptId(applicationAttemptId.toString())
        .setDagPlan(dagPlan)
        .setSubmitTime(submitTime);
    if (queueName != null) {
      builder.setQueueName(queueName);
    }
    if (cumulativeAdditionalLocalResources != null
        && !cumulativeAdditionalLocalResources.isEmpty()) {
      builder.setCumulativeAdditionalAmResources(DagTypeConverters
          .convertFromLocalResources(cumulativeAdditionalLocalResources));
    }
    return builder.build();
  }

  /** Restores this event's fields from its protobuf form. */
  public void fromProto(DAGSubmittedProto proto) {
    this.dagID = TezDAGID.fromString(proto.getDagId());
    this.dagPlan = proto.getDagPlan();
    this.dagName = this.dagPlan.getName();
    this.submitTime = proto.getSubmitTime();
    this.applicationAttemptId = ConverterUtils.toApplicationAttemptId(
        proto.getApplicationAttemptId());
    if (proto.hasQueueName()) {
      this.queueName = proto.getQueueName();
    }
    if (proto.hasCumulativeAdditionalAmResources()) {
      this.cumulativeAdditionalLocalResources = DagTypeConverters
          .convertFromPlanLocalResources(proto.getCumulativeAdditionalAmResources());
    }
  }

  @Override
  public void toProtoStream(OutputStream outputStream) throws IOException {
    toProto().writeDelimitedTo(outputStream);
  }

  @Override
  public void fromProtoStream(InputStream inputStream) throws IOException {
    DAGSubmittedProto proto = DAGSubmittedProto.parseDelimitedFrom(inputStream);
    if (proto == null) {
      throw new IOException("No data found in stream");
    }
    fromProto(proto);
  }

  @Override
  public String toString() {
    return "dagID=" + dagID
        + ", submitTime=" + submitTime
        + ", queueName=" + queueName;
  }

  @Override
  public void toSummaryProtoStream(OutputStream outputStream) throws IOException {
    // The summary payload is the DAG name; StandardCharsets.UTF_8 avoids the
    // checked UnsupportedEncodingException of the charset-name overload.
    ProtoUtils.toSummaryEventProto(dagID, submitTime,
        HistoryEventType.DAG_SUBMITTED, dagName.getBytes(StandardCharsets.UTF_8))
        .writeDelimitedTo(outputStream);
  }

  @Override
  public void fromSummaryProtoStream(SummaryEventProto proto) throws IOException {
    this.dagID = TezDAGID.fromString(proto.getDagId());
    this.submitTime = proto.getTimestamp();
    this.dagName = new String(proto.getEventPayload().toByteArray(),
        StandardCharsets.UTF_8);
  }

  @Override
  public boolean writeToRecoveryImmediately() {
    return true;
  }

  public String getDAGName() {
    return this.dagName;
  }

  public DAGProtos.DAGPlan getDAGPlan() {
    return this.dagPlan;
  }

  public TezDAGID getDagID() {
    return dagID;
  }

  public ApplicationAttemptId getApplicationAttemptId() {
    return applicationAttemptId;
  }

  public Map<String, LocalResource> getCumulativeAdditionalLocalResources() {
    return cumulativeAdditionalLocalResources;
  }

  public long getSubmitTime() {
    return submitTime;
  }

  public DAGPlan getDagPlan() {
    return dagPlan;
  }

  public String getUser() {
    return user;
  }

  public Configuration getConf() {
    return conf;
  }

  public void setHistoryLoggingEnabled(boolean loggingEnabled) {
    historyLoggingEnabled = loggingEnabled;
  }

  public boolean isHistoryLoggingEnabled() {
    return historyLoggingEnabled;
  }

  public String getContainerLogs() {
    return containerLogs;
  }

  public String getQueueName() {
    return queueName;
  }
}
| |
package de.hpi.is.ddd.algorithms.bruteforce;
import de.hpi.is.ddd.evaluation.Evaluation;
import de.hpi.is.ddd.evaluation.Evaluator;
import de.hpi.is.idd.datasets.CoraUtility;
import de.hpi.is.idd.interfaces.DatasetUtils;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.lang3.tuple.Pair;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Example Brute Force implementation for finding duplicates in a local
 * environment, simulating a distributed one.
 *
 * <p>Given the full dataset, tries to uniformly split the pair comparisons
 * across the given nodes. Replicates all the data to all nodes.
 */
public class ParallelBruteForce {

    private final static CSVFormat FORMAT = CSVFormat.TDF.withFirstRecordAsHeader();

    /* Number of nodes */
    private int n;
    /* Number of records */
    private int m;
    /* The whole set of records that needs to be deduplicated */
    private List<Map<String, Object>> records;
    /* DatasetUtils of the current dataset */
    private DatasetUtils du;

    public ParallelBruteForce(int n, int m, List<Map<String, Object>> records, DatasetUtils du) {
        this.n = n;
        this.m = m;
        this.records = records;
        this.du = du;
    }

    /**
     * Simulates a single worker node: compares its assigned records against
     * all records to the right of them and stores the resulting duplicate
     * pairs in a local "HDFS" stand-in.
     */
    public class PseudoNode {
        Integer nodeID;
        /* Number of similarity comparisons this node has performed. */
        private long comparisons;
        /* Duplicates this node has "written to HDFS". */
        private Set<Pair<String, String>> duplicatesInHDFS = new HashSet<>();

        public PseudoNode(Integer nodeID) {
            this.nodeID = nodeID;
            this.comparisons = 0L;
        }

        /**
         * Compares record {@code i} against every record to its right and
         * returns the duplicate id pairs found.
         */
        public Set<Pair<String, String>> deduplicate(Integer i, List<Map<String, Object>> records, DatasetUtils du) {
            Set<Pair<String, String>> duplicates = new HashSet<>();
            for (int j = i + 1; j < records.size(); ++j) {
                double sim = du.calculateSimilarity(records.get(i), records.get(j), null);
                ++comparisons;
                if (sim >= du.getDatasetThreshold()) {
                    duplicates.add(Pair.of(String.valueOf(records.get(i).get("id")),
                            String.valueOf(records.get(j).get("id"))));
                }
            }
            return duplicates;
        }

        /**
         * Calculates the duplicates for all records assigned to this node and
         * writes them into the simulated HDFS.
         */
        public void deduplicateAll(List<Map<String, Object>> records, List<Integer> recordsAssigned, DatasetUtils du) {
            System.out.println(nodeID + " started");
            Set<Pair<String, String>> duplicates = new HashSet<>();
            // Guard: the partitioning may contain fewer partitions than there
            // are nodes, in which case this node simply has nothing to do.
            if (recordsAssigned != null) {
                for (Integer i : recordsAssigned) {
                    duplicates.addAll(deduplicate(i, records, du));
                }
            }
            writeToHDFS(duplicates);
            System.out.println(nodeID + " finished");
        }

        /**
         * Writes the given duplicates to the simulated HDFS.
         * @param duplicatesInHDFS duplicate pairs to persist.
         */
        private void writeToHDFS(Set<Pair<String, String>> duplicatesInHDFS) {
            this.duplicatesInHDFS.addAll(duplicatesInHDFS);
        }

        /**
         * @return the current node's duplicates, from the simulated HDFS.
         */
        public Set<Pair<String, String>> retrieveDuplicatesFromHDFS() {
            return duplicatesInHDFS;
        }
    }

    /* Total number of similarity comparisons performed across all nodes. */
    private long totalComparisons = 0L;

    /**
     * Having a partitioning, replicates data across nodes, assigning them the
     * pairs they have to check for similarities and finally collects and
     * merges the results.
     */
    public Set<Pair<String, String>> deduplicate(Map<Integer, List<Integer>> partitioning) {
        /* Replicate data, send comparisons */
        Map<Integer, PseudoNode> nodes = new HashMap<>();
        for (int i = 0; i < n; ++i) {
            nodes.put(i, new PseudoNode(i));
        }

        /* Execute */
        nodes.entrySet().parallelStream()
                .forEach(x -> x.getValue().deduplicateAll(records, partitioning.get(x.getKey()), du));

        /* Collect and merge results */
        List<Set<Pair<String, String>>> duplicatesLists = nodes.entrySet().parallelStream()
                .map(x -> x.getValue().retrieveDuplicatesFromHDFS())
                .collect(Collectors.toList());
        Set<Pair<String, String>> duplicates = new HashSet<>();
        duplicatesLists.forEach(duplicates::addAll);

        /* Retrieve the calculated comparisons */
        for (int i = 0; i < n; ++i) {
            totalComparisons += nodes.get(i).comparisons;
        }
        return duplicates;
    }

    /**
     * Using a very simple technique, partitions the comparisons across the
     * nodes so that every node has close to the average number of comparisons
     * to do.
     *
     * <p>The comparison counts are computed in {@code long} arithmetic so that
     * {@code m * (m - 1)} cannot overflow an {@code int} for large datasets.
     *
     * TODO: Replace with a closed form, instead of the greedy calculation that
     * takes place now.
     *
     * @param n number of nodes.
     * @param m number of records.
     * @return a partitioning of the comparisons across nodes, in the form
     *         partition -> [record idx]. Trailing records that need no
     *         comparisons of their own may be absent from the result.
     */
    public static Map<Integer, List<Integer>> getPartitioning(int n, int m) {
        /* Total number of comparisons between all records: m choose 2. */
        long totalCmp = (long) m * (m - 1) / 2;
        /* Average number of comparisons that a node has to do */
        long avgCmp = totalCmp / n;
        System.out.println("Average number of comparisons: " + avgCmp);

        /* The final partitioning that will be returned */
        Map<Integer, List<Integer>> partitioning = new HashMap<>();

        /* Records assigned to the current node; each has to be compared with
         * everything to the right of it. When the accumulated comparison count
         * crosses the average we move on to the next node. */
        List<Integer> recordsAssigned = new ArrayList<>();
        long nodeCmp = 0; // how many comparisons the current node has to do
        for (int i = 0; i < m; ++i) {
            nodeCmp += m - (i + 1); // comparisons of record i with everything to its right
            recordsAssigned.add(i);
            /* Reached (or passed) the average: seal this partition and start
             * counting for the next node. */
            if (nodeCmp >= avgCmp) {
                int nodeIdx = partitioning.size();
                partitioning.put(nodeIdx, recordsAssigned);
                System.out.println("Node: " + nodeIdx + " assigned: " + nodeCmp + " comparisons.");
                nodeCmp = 0;
                recordsAssigned = new ArrayList<>();
            }
        }
        if (nodeCmp > 0) { // last, partially filled node
            int nodeIdx = partitioning.size();
            partitioning.put(nodeIdx, recordsAssigned);
            System.out.println("Node: " + nodeIdx + " assigned: " + nodeCmp + " comparisons.");
        }
        return partitioning;
    }

    /**
     * Simple example that shows a dummy partitioning.
     *
     * @param args key=value arguments: n, dataset, goldStandard.
     */
    public static void main(String[] args) {
        Map<String, String> argsMap = new HashMap<>();
        for (String arg : args) {
            String[] toks = arg.split("=");
            argsMap.put(toks[0], toks[1]);
        }

        int n = 8; // number of nodes
        if (argsMap.containsKey("n")) {
            n = Integer.parseInt(argsMap.get("n"));
        }
        File dataset = new File("/data/datasets/incremental_duplicate_detection/cora/cora_v3.tsv");
        if (argsMap.containsKey("dataset")) {
            dataset = new File(argsMap.get("dataset"));
        }
        File goldStandard = new File("/data/datasets/incremental_duplicate_detection/cora/cora_ground_truth.tsv");
        if (argsMap.containsKey("goldStandard")) {
            goldStandard = new File(argsMap.get("goldStandard"));
        }

        DatasetUtils du = new CoraUtility();
        List<Map<String, Object>> records = new ArrayList<>();
        try (CSVParser parser = FORMAT.parse(new InputStreamReader(new FileInputStream(dataset)))) {
            for (CSVRecord record : parser) {
                records.add(du.parseRecord(record.toMap()));
            }
        } catch (IOException e) {
            throw new RuntimeException("Error while parsing the dataset file", e);
        }

        int m = records.size();
        Evaluator evaluator = new Evaluator(goldStandard);
        ParallelBruteForce pbf = new ParallelBruteForce(n, m, records, du);
        Map<Integer, List<Integer>> partitioning = getPartitioning(n, m);
        System.out.println("Partitioning; " + partitioning);
        Set<Pair<String, String>> duplicates = pbf.deduplicate(partitioning);
        System.out.println("Duplicates (pairs): " + duplicates.size());
        evaluator.setTotalComparisons(pbf.totalComparisons);
        Evaluation evaluation = evaluator.evaluate(duplicates);
        System.out.println("Evaluation: " + evaluation);
    }
}
| |
package org.python.types;
import java.util.ArrayList;
public class Function extends org.python.types.Object implements org.python.Callable {
public static final int CO_OPTIMIZED = 0x1;
public static final int CO_NEWLOCALS = 0x2;
public static final int CO_VARARGS = 0x4;
public static final int CO_VARKEYWORDS = 0x8;
public static final int CO_NESTED = 0x10;
public static final int CO_GENERATOR = 0x20;
public static final int CO_NOFREE = 0x40;
org.python.types.Type.Origin origin;
org.python.types.Str name;
org.python.types.Code code;
java.lang.reflect.Method method;
java.util.Map<java.lang.String, org.python.Object> globals;
java.util.List<org.python.Object> default_args;
java.util.Map<java.lang.String, org.python.Object> default_kwargs;
org.python.types.Closure closure;
/**
 * Populates this function's __dict__ with the standard function attributes
 * (__name__, __qualname__, __code__, __doc__) derived from this.name and
 * this.code.
 *
 * Fix: removed an unused local Str built from method.getName() that shadowed
 * the {@code name} field and was never read.
 */
private void populateAttrs() {
    this.__dict__.put("__name__", this.name);
    if (this.name != null) {
        this.__dict__.put("__qualname__", this.name);
    } else {
        // NOTE(review): the fallback __qualname__ is read from the code
        // object's "co_consts" dict entry — confirm this key is intended
        // (co_name would seem more natural).
        org.python.Object co_name = this.code.__dict__.get("co_consts");
        this.__dict__.put("__qualname__", co_name);
    }
    this.__dict__.put("__code__", this.code);
    // __doc__ is the first constant of the code object when one exists
    // (by convention the docstring), otherwise None. Any failure while
    // inspecting the constants falls back to None.
    org.python.Object doc;
    try {
        org.python.types.Tuple consts = (org.python.types.Tuple) this.code.co_consts;
        if (consts != null && ((org.python.types.Int) consts.count()).value != 0) {
            doc = consts.__getitem__(new org.python.types.Int(0));
        } else {
            doc = org.python.types.NoneType.NONE;
        }
    } catch (java.lang.NullPointerException e) {
        doc = org.python.types.NoneType.NONE;
    } catch (java.lang.ClassCastException e) {
        doc = org.python.types.NoneType.NONE;
    } catch (java.lang.IndexOutOfBoundsException e) {
        doc = org.python.types.NoneType.NONE;
    }
    this.__dict__.put("__doc__", doc);
    // this.__dict__.put("__call__")
}
/**
 * Constructor for builtins implemented as Java methods.
 *
 * Builds a synthetic Code object describing the builtin's signature so that
 * introspection (co_argcount, co_flags, co_varnames, ...) behaves the same
 * way as for a normal Python function.
 *
 * Fix: removed the unused local {@code kwonly}; the keyword-only count is
 * taken directly from {@code kwonlyargs.length}.
 *
 * @param method the Java method implementing the builtin
 * @param args names of the required positional arguments
 * @param default_args names of the positional arguments that have defaults
 * @param vararg_name name of the *args parameter, or null if there is none
 * @param kwonlyargs names of the keyword-only arguments
 * @param kwargs_name name of the **kwargs parameter, or null if there is none
 */
public Function(
        java.lang.reflect.Method method,
        java.lang.String[] args,
        java.lang.String[] default_args,
        java.lang.String vararg_name,
        java.lang.String[] kwonlyargs,
        java.lang.String kwargs_name) {
    super();
    this.origin = org.python.types.Type.Origin.BUILTIN;
    this.name = new org.python.types.Str(method.getName());
    this.method = method;
    long flags = 0;
    java.util.List<org.python.Object> varnames = new java.util.ArrayList<org.python.Object>();
    for (java.lang.String arg : args) {
        varnames.add(new org.python.types.Str(arg));
    }
    // Null placeholders for the default values — presumably filled in
    // elsewhere; TODO confirm against the call sites.
    this.default_args = new java.util.ArrayList<org.python.Object>();
    for (java.lang.String arg : default_args) {
        this.default_args.add(null);
        varnames.add(new org.python.types.Str(arg));
    }
    // The base argument count is the length of the arguments collected so far.
    long argcount = varnames.size();
    if (vararg_name != null) {
        flags |= CO_VARARGS;
        varnames.add(new org.python.types.Str(vararg_name));
    }
    this.default_kwargs = new java.util.HashMap<java.lang.String, org.python.Object>();
    for (java.lang.String arg : kwonlyargs) {
        varnames.add(new org.python.types.Str(arg));
        this.default_kwargs.put(arg, null);
    }
    if (kwargs_name != null) {
        flags |= CO_VARKEYWORDS;
        varnames.add(new org.python.types.Str(kwargs_name));
    }
    this.code = new org.python.types.Code(
            new org.python.types.Int(argcount), // co_argcount
            new org.python.types.Tuple(), // co_cellvars
            null, // new org.python.types.Bytes(), // co_code
            null, // new org.python.types.Tuple(), // co_consts
            null, // new org.python.types.Str(), // co_filename
            null, // new org.python.types.Int(), // co_firstlineno
            new org.python.types.Int(flags), // co_flags
            new org.python.types.Tuple(), // co_freevars
            new org.python.types.Int(kwonlyargs.length), // co_kwonlyargcount
            null, // new org.python.types.Bytes(), // co_lnotab
            this.name, // co_name
            null, // new org.python.types.Tuple(), // co_names
            null, // new org.python.types.Int(), // co_nlocals
            null, // new org.python.types.Int(), // co_stacksize
            new org.python.types.Tuple(varnames) // co_varnames
    );
    populateAttrs();
}
/**
 * Constructor for normal Python functions (also used for Java shims of
 * Python modules).
 *
 * @param name the function's name
 * @param code the code object describing the function's signature
 * @param method the compiled Java method backing the function
 * @param globals the module globals the function was defined in
 * @param default_args default values for positional arguments
 * @param default_kwargs default values for keyword-only arguments
 * @param closure the enclosing closure, or null when the function is not nested
 */
public Function(
        org.python.types.Str name,
        org.python.types.Code code,
        java.lang.reflect.Method method,
        java.util.Map<java.lang.String, org.python.Object> globals,
        java.util.List<org.python.Object> default_args,
        java.util.Map<java.lang.String, org.python.Object> default_kwargs,
        org.python.types.Closure closure) {
    super();
    this.origin = org.python.types.Type.Origin.PYTHON;
    this.name = name;
    this.code = code;
    this.method = method;
    this.globals = globals;
    this.default_args = default_args;
    this.default_kwargs = default_kwargs;
    this.closure = closure;
    populateAttrs();
}
@org.python.Method(
        __doc__ = "Return repr(self)."
)
public org.python.Object __repr__() {
    // Same shape as CPython's default repr: "<function name at 0x...>".
    // Integer.toHexString renders the hash exactly as the %x format would.
    java.lang.String repr = "<" + this.typeName() + " " + this.name
            + " at 0x" + java.lang.Integer.toHexString(this.hashCode()) + ">";
    return new org.python.types.Str(repr);
}
@org.python.Method(
        __doc__ = ""
)
public org.python.Object __get__(org.python.Object instance, org.python.Object klass) {
    // Accessed on the class itself: return the plain function unchanged.
    if (instance == klass) {
        return this;
    }
    // Accessed through a closure: rebind the function to that closure.
    if (instance instanceof org.python.types.Closure) {
        return new org.python.types.Function(this.name, this.code, this.method, this.globals, this.default_args, this.default_kwargs, (org.python.types.Closure) instance);
    }
    // Accessed through an ordinary instance (not a module): bind as a method.
    if (!(instance instanceof org.python.types.Module)) {
        return new org.python.types.Method(instance, (org.python.types.Type) klass, this);
    }
    // Module attribute access: functions are not bound.
    return this;
}
@org.python.Method(
        __doc__ = ""
)
public org.python.Object __bool__() {
    // Function objects are always truthy.
    return new org.python.types.Bool(true);
}
private void checkMissingArgs(int requiredArgs, int passedArgs, java.util.Map<java.lang.String, org.python.Object> kwargs, java.util.List<org.python.Object> varnames, int first_arg) {
int n_missing_pos_args = requiredArgs - passedArgs;
java.util.List<String> missingArgs = new ArrayList<String>();
if (n_missing_pos_args > 0) {
// build list of actual missing args, checking if haven't been passed as kwargs
for (int i = first_arg; i < n_missing_pos_args; i++) {
java.lang.String argname = ((String) varnames.get(i + passedArgs).toJava());
if (!kwargs.containsKey(argname)) {
missingArgs.add(argname);
}
}
if (missingArgs.size() > 0) {
// we show missing args using Oxford comma, as CPython does
StringBuilder sb = new StringBuilder();
for (int i = 0; i < missingArgs.size(); i++) {
sb.append("'" + missingArgs.get(i) + "'");
if (i <= missingArgs.size() - 2) {
sb.append(", ");
}
if (i == missingArgs.size() - 2) {
sb.append("and ");
}
}
throw new org.python.exceptions.TypeError(this.name + "() missing " + missingArgs.size() + " required positional "
+ (missingArgs.size() == 1 ? "argument" : "arguments") + ": " + sb.toString());
}
}
}
private void throwUnexpectedPositionalArgumentsError(int numExpected, int numGot) {
String posArgs = numExpected + " positional argument" + (numExpected == 1 ? "" : "s");
String givenArgs = numGot + (numGot == 1 ? " was given" : " were given");
String mesg = this.name.value + "() takes " + posArgs + " but " + givenArgs;
throw new org.python.exceptions.TypeError(mesg);
}
    /**
     * Marshal Python calling conventions (positionals, keywords, defaults,
     * *args, **kwargs) into the flat Java argument array expected by the
     * reflected method.
     *
     * @param instance bound instance, or null for a plain function call
     * @param args positional arguments from the caller (may be null)
     * @param kwargs keyword arguments from the caller; entries are REMOVED as
     *               they are matched, so leftovers can feed **kwargs
     * @return the adjusted argument array, or null when the code object takes
     *         no arguments at all
     */
    java.lang.Object[] adjustArguments(org.python.Object instance, org.python.Object[] args, java.util.Map<java.lang.String, org.python.Object> kwargs) {
        // if (kwargs.size() > 0) {
        //     // TODO: This doesn't have to be so - we *could* introspect argument names.
        //     throw new org.python.exceptions.RuntimeError("Cannot use kwargs to invoke a native Java method.");
        // }
        // Signature shape comes from the compiled code object.
        int argcount = (int) this.code.co_argcount.value;
        int kwonlyargcount = (int) this.code.co_kwonlyargcount.value;
        int flags = (int) this.code.co_flags.value;
        java.util.List<org.python.Object> varnames = this.code.co_varnames.value;
        int first_arg = 0;
        int has_varargs = 0;
        int has_varkwargs = 0;
        // System.out.println("Instance = " + instance);
        // System.out.println("method:" + method);
        // System.out.println("args:" + args.length);
        // System.out.println("kwargs:" + kwargs);
        // System.out.println("argcount = " + argcount);
        // System.out.println("kwonlyargcount = " + kwonlyargcount);
        // Total Java slots: positionals + kw-only, plus one each for *args / **kwargs.
        int n_args = argcount + kwonlyargcount;
        if ((flags & CO_VARARGS) != 0) {
            // System.out.println("HAS VARARGS");
            n_args += 1;
            has_varargs = 1;
        }
        if ((flags & CO_VARKEYWORDS) != 0) {
            // System.out.println("HAS VARKEYWORDS");
            n_args += 1;
            has_varkwargs = 1;
        }
        // Positional parameters without a default value are required.
        int required_args = argcount - this.default_args.size();
        // System.out.println("nargs = " + n_args);
        // System.out.println("first default = " + required_args);
        // Without *args, surplus positionals are an immediate TypeError.
        if (0 == has_varargs && args != null && args.length > n_args) {
            throwUnexpectedPositionalArgumentsError(n_args, args.length);
        }
        // If there are genuinely *no* arguments - not even self - return null;
        if (n_args == 0) {
            return null;
        }
        java.lang.Object[] adjusted = new java.lang.Object[n_args];
        // If this is an instance, the first argument will be self; we don't
        // need to pass this to the Java function.
        if (instance != null && (java.lang.reflect.Modifier.isStatic(method.getModifiers()) || this.closure != null)) {
            // System.out.println("CALL USING INSTANCE");
            first_arg = 1;
            adjusted[0] = instance;
            // System.out.println("  aARG 0: " + instance);
        }
        // Raises TypeError if any required positional is still unaccounted for.
        checkMissingArgs(required_args, (args == null ? 0 : args.length), kwargs, varnames, first_arg);
        // System.out.println("First arg = " + first_arg);
        // Populate the positional args.
        // NOTE(review): assumes args != null whenever this loop body runs -
        // confirm all callers pass a (possibly empty) array.
        for (int i = 0; i < argcount - first_arg; i++) {
            if (i < args.length) {
                // Caller supplied this positional explicitly.
                // System.out.println("  b" + (i + first_arg));
                adjusted[i + first_arg] = args[i];
                // System.out.println("  bARG " + (i + first_arg) + ": " + args[i]);
                if (kwargs != null) {
                    // Supplying the same parameter positionally AND by keyword is an error.
                    java.lang.String varname = ((org.python.types.Str) varnames.get(i)).value;
                    org.python.Object value = kwargs.remove(varname);
                    if (value != null) {
                        throw new org.python.exceptions.TypeError(this.name + "() got multiple values for argument '" + varname + "'");
                    }
                }
            } else {
                // Use a default argument. They might be specified as a kwarg.
                // System.out.println("  c" + (i + first_arg));
                org.python.Object value = null;
                if (kwargs != null) {
                    java.lang.String varname = ((org.python.types.Str) varnames.get(i + first_arg)).value;
                    value = kwargs.remove(varname);
                }
                if (value == null) {
                    // Index into default_args relative to the first defaulted parameter.
                    value = this.default_args.get(i + first_arg - required_args);
                }
                adjusted[i + first_arg] = value;
                // System.out.println("  cARG " + i + ": " + value);
            }
        }
        // Create a tuple for the varargs
        if ((flags & CO_VARARGS) != 0) {
            // System.out.println("Handle varargs");
            // Construct Python tuple object
            org.python.types.Tuple tuple = new org.python.types.Tuple(
                java.util.Arrays.asList(java.util.Arrays.copyOfRange(args, argcount - first_arg, args.length)));
            adjusted[argcount] = tuple;
            // System.out.println("  dARG " + argcount + ": " + tuple);
        }
        // Populate the kwonly args
        for (int i = 0; i < kwonlyargcount; i++) {
            // Keyword-only parameters live after positionals (and *args, if any) in varnames.
            java.lang.String varname = ((org.python.types.Str) varnames.get(argcount + has_varargs + i)).value;
            // System.out.println("  e" + (argcount + has_varargs + i) + " " + varname);
            org.python.Object value = kwargs.remove(varname);
            if (value == null) {
                value = this.default_kwargs.get(varname);
            }
            adjusted[argcount + has_varargs + i] = value;
            // System.out.println("  eARG " + (argcount + has_varargs + i) + ": " + value);
        }
        // Add remaining kwargs to kwargs argument if we have one.
        if ((flags & CO_VARKEYWORDS) != 0) {
            // System.out.println("Handle varkwargs = " + kwargs);
            // Whatever was not consumed above becomes the **kwargs dict.
            org.python.types.Dict kwargDict = new org.python.types.Dict();
            for (java.util.Map.Entry<java.lang.String, org.python.Object> entry : kwargs.entrySet()) {
                // System.out.println("Add KWARG" + entry.getKey());
                kwargDict.__setitem__(new org.python.types.Str(entry.getKey()), entry.getValue());
            }
            adjusted[adjusted.length - 1] = kwargDict;
            // System.out.println("  fARG " + (adjusted.length - 1) + ": " + kwargDict);
        }
        return adjusted;
    }
public org.python.Object invoke(org.python.Object[] args, java.util.Map<java.lang.String, org.python.Object> kwargs) {
return this.invoke(null, args, kwargs);
}
    /**
     * Invoke this function, optionally bound to an instance, translating the
     * arguments via adjustArguments() and any Java-side failure into a Python
     * exception.
     *
     * @param instance receiver for bound calls, or null
     * @param args positional arguments
     * @param kwargs keyword arguments (consumed by adjustArguments)
     * @return the Python object produced by the underlying Java method
     */
    public org.python.Object invoke(org.python.Object instance, org.python.Object[] args, java.util.Map<java.lang.String, org.python.Object> kwargs) {
        try {
            // org.Python.debug("Function:", this.name);
            // org.Python.debug("   instance: ", instance);
            // for (org.python.Object arg: args) {
            //     org.Python.debug("        arg: ", arg);
            //     if (arg != null) {
            //         org.Python.debug("       type: ", arg.getClass());
            //     }
            // }
            // org.Python.debug("     kwargs: ", kwargs);
            // org.Python.debug("     default args: ", this.default_args);
            // org.Python.debug("   default kwargs: ", this.default_kwargs);
            // if this.__dict__.__code__.co_flags & CO_GENERATOR:
            //     gen = Generator(frame, self._vm)
            //     frame.generator = gen
            //     retval = gen
            // else:
            // Marshal Python calling conventions into the flat Java argument array.
            java.lang.Object[] adjusted_args = adjustArguments(instance, args, kwargs);
            // if (adjusted_args != null) {
            //     for (java.lang.Object arg: adjusted_args) {
            //         org.Python.debug("   Adjusted arg: ", arg);
            //         if (arg != null) {
            //             org.Python.debug("           type: ", arg.getClass());
            //         }
            //     }
            // } else {
            //     org.Python.debug("No adjusted args");
            // }
            // Python methods are stored as static methods on the class, so
            // the instance argument is passed in as a regular method argument,
            // not as the implied Java register 0. Builtins and closure methods
            // require the instance to be passed as the explicit instance.
            if (java.lang.reflect.Modifier.isStatic(this.method.getModifiers())) {
                return org.python.types.Type.toPython(this.method.invoke(null, adjusted_args));
            } else if (this.closure != null) {
                return org.python.types.Type.toPython(this.method.invoke(this.closure, adjusted_args));
            } else {
                return org.python.types.Type.toPython(this.method.invoke(instance, adjusted_args));
            }
        } catch (java.lang.IllegalAccessException e) {
            throw new org.python.exceptions.RuntimeError("Illegal access to Java method " + this.method);
        } catch (java.lang.reflect.InvocationTargetException e) {
            try {
                // org.Python.debug("Exception:", e.getTargetException());
                // for (java.lang.StackTraceElement ste: e.getTargetException().getStackTrace()) {
                //     org.Python.debug("     ", ste);
                // }
                // If the Java method raised an Python exception, re-raise that
                // exception as-is. If it wasn't a Python exception, wrap it
                // as one and continue.
                throw (org.python.exceptions.BaseException) e.getCause();
            } catch (ClassCastException java_e) {
                // Non-Python cause: surface it as a Python RuntimeError, falling
                // back to the exception class name when there is no message.
                java.lang.String message = e.getCause().getMessage();
                if (message == null) {
                    message = e.getCause().getClass().getName();
                }
                throw new org.python.exceptions.RuntimeError(message);
            }
        // } finally {
        //     System.out.println("INVOKE METHOD DONE");
        }
    }
}
| |
/*!
* mifmi-commons4j
* https://github.com/mifmi/mifmi-commons4j
*
* Copyright (c) 2015 mifmi.org and other contributors
* Released under the MIT license
* https://opensource.org/licenses/MIT
*/
package org.mifmi.commons4j.app.web.servlet;
import java.io.IOException;
import java.lang.reflect.Method;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.mifmi.commons4j.web.servlet.HttpReqRes;
import org.mifmi.commons4j.web.servlet.MifmiServletException;
/**
 * Base servlet that binds the current request/response pair to the worker
 * thread, so subclasses can implement argument-less doGet()/doPost()/... and
 * reach the request via {@link #request()} / {@link #response()}.
 *
 * <p>Per-request lifecycle: doInit() → preService() → HTTP method handler →
 * postService() → doDestroy(); any failure is routed to handleError().</p>
 */
public abstract class AbstractHttpServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    /** Per-request state, bound to the worker thread for the duration of service(). */
    private final ThreadLocal<HttpReqRes> localReqRes = new ThreadLocal<HttpReqRes>();

    /** Per-request initialization hook; runs after the request/response pair is bound. */
    protected void doInit() throws Exception {
    }

    /** Per-request teardown hook; runs after the request has been serviced. */
    protected void doDestroy() throws Exception {
    }

    /** Runs immediately before HTTP method dispatch. */
    protected void preService() throws Exception {
    }

    /** Runs after HTTP method dispatch, even when the handler threw. */
    protected void postService() throws Exception {
    }

    /** Argument-less GET handler; default defers to HttpServlet (HTTP 405). Override to handle GET. */
    protected void doGet() throws Exception {
        super.doGet(request(), response());
    }

    /** Argument-less POST handler; default defers to HttpServlet (HTTP 405). Override to handle POST. */
    protected void doPost() throws Exception {
        super.doPost(request(), response());
    }

    /** Argument-less PUT handler; default defers to HttpServlet (HTTP 405). Override to handle PUT. */
    protected void doPut() throws Exception {
        super.doPut(request(), response());
    }

    /** Argument-less DELETE handler; default defers to HttpServlet (HTTP 405). Override to handle DELETE. */
    protected void doDelete() throws Exception {
        super.doDelete(request(), response());
    }

    /**
     * Error hook; the default sends HTTP 500.
     *
     * @param e the failure raised while servicing the request
     * @return true if the error was fully handled
     * @throws Exception if error handling itself fails
     */
    protected boolean handleError(Throwable e) throws Exception {
        response().sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        return false;
    }

    /** Binds the request/response pair to this thread and runs doInit(). */
    private void initService(HttpServletRequest request, HttpServletResponse response) throws Exception {
        HttpReqRes reqRes = new HttpReqRes(request, response);
        this.localReqRes.set(reqRes);
        doInit();
    }

    /** Runs doDestroy(); thread-local cleanup itself happens in service(). */
    private void destroyService(HttpServletRequest request, HttpServletResponse response) throws Exception {
        doDestroy();
        // The ThreadLocal is cleared in service()'s outermost finally, so that
        // handleError() can still reach the response if doDestroy() throws.
    }

    @Override
    protected void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            try {
                initService(request, response);
                try {
                    preService();
                    super.service(request, response);
                } finally {
                    postService();
                }
            } catch (Throwable e) {
                try {
                    handleError(e);
                } catch (RuntimeException e1) {
                    throw e1;
                } catch (ServletException e1) {
                    throw e1;
                } catch (IOException e1) {
                    throw e1;
                } catch (Exception e1) {
                    throw new MifmiServletException(e1);
                }
            } finally {
                try {
                    destroyService(request, response);
                } catch (Throwable e) {
                    try {
                        handleError(e);
                    } catch (RuntimeException e1) {
                        throw e1;
                    } catch (ServletException e1) {
                        throw e1;
                    } catch (IOException e1) {
                        throw e1;
                    } catch (Exception e1) {
                        throw new MifmiServletException(e1);
                    }
                }
            }
        } finally {
            // Always detach per-request state from the (pooled) worker thread.
            // Previously this was never removed, leaking the last request's
            // HttpReqRes into subsequent requests served by the same thread.
            this.localReqRes.remove();
        }
    }

    @Override
    protected final void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            doGet();
        } catch (RuntimeException e) {
            throw e;
        } catch (ServletException e) {
            throw e;
        } catch (IOException e) {
            throw e;
        } catch (Exception e) {
            throw new MifmiServletException(e);
        }
    }

    @Override
    protected final void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            doPost();
        } catch (RuntimeException e) {
            throw e;
        } catch (ServletException e) {
            throw e;
        } catch (IOException e) {
            throw e;
        } catch (Exception e) {
            throw new MifmiServletException(e);
        }
    }

    @Override
    protected final void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            doPut();
        } catch (RuntimeException e) {
            throw e;
        } catch (ServletException e) {
            throw e;
        } catch (IOException e) {
            throw e;
        } catch (Exception e) {
            throw new MifmiServletException(e);
        }
    }

    @Override
    protected final void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            doDelete();
        } catch (RuntimeException e) {
            throw e;
        } catch (ServletException e) {
            throw e;
        } catch (IOException e) {
            throw e;
        } catch (Exception e) {
            throw new MifmiServletException(e);
        }
    }

    /**
     * Reports the supported HTTP methods via the Allow header. A method is
     * advertised when the subclass overrides the corresponding no-arg handler.
     * NOTE(review): this mirrors OPTIONS-style behaviour rather than echoing
     * the request as a classic TRACE would - confirm intended.
     */
    @Override
    protected final void doTrace(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        Class<?> clazz = this.getClass();
        boolean allowGet = (getDeclaredMethod(clazz, AbstractHttpServlet.class, "doGet") != null);
        boolean allowPost = (getDeclaredMethod(clazz, AbstractHttpServlet.class, "doPost") != null);
        boolean allowPut = (getDeclaredMethod(clazz, AbstractHttpServlet.class, "doPut") != null);
        boolean allowDelete = (getDeclaredMethod(clazz, AbstractHttpServlet.class, "doDelete") != null);
        java.util.List<String> allow = new java.util.ArrayList<String>();
        if (allowGet) {
            allow.add("GET");
            // HEAD is supported whenever GET is.
            allow.add("HEAD");
        }
        if (allowPost) {
            allow.add("POST");
        }
        if (allowPut) {
            allow.add("PUT");
        }
        if (allowDelete) {
            allow.add("DELETE");
        }
        // TRACE and OPTIONS are always available on this base class.
        allow.add("TRACE");
        allow.add("OPTIONS");
        response().setHeader("Allow", String.join(", ", allow));
    }

    @Override
    protected final void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        super.doOptions(request, response);
    }

    /**
     * Looks up a declared method by walking up the class hierarchy, stopping
     * (with null) once searchBreakClazz or Object is reached.
     */
    private static Method getDeclaredMethod(Class<?> clazz, Class<?> searchBreakClazz, String name, Class<?>... parameterTypes) {
        if (searchBreakClazz != null) {
            if (clazz.equals(searchBreakClazz)) {
                return null;
            }
        }
        Method method;
        try {
            method = clazz.getDeclaredMethod(name, parameterTypes);
        } catch (NoSuchMethodException e) {
            if (clazz.equals(Object.class)) {
                return null;
            }
            return getDeclaredMethod(clazz.getSuperclass(), searchBreakClazz, name, parameterTypes);
        }
        return method;
    }

    /** @return the request/response pair bound to the current thread. */
    protected HttpReqRes reqres() {
        return this.localReqRes.get();
    }

    /** @return the current thread's request. */
    protected HttpServletRequest request() {
        return reqres().request();
    }

    /** @return the current thread's response. */
    protected HttpServletResponse response() {
        return reqres().response();
    }

    /** Forwards the current request to the given path. */
    protected void forward(CharSequence path) {
        reqres().forward(path);
    }

    /** Redirects the client to the given path. */
    protected void redirect(CharSequence path) {
        reqres().redirect(path);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheRebalanceMode;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheRebalanceMode.ASYNC;
import static org.apache.ignite.cache.CacheRebalanceMode.NONE;
import static org.apache.ignite.cache.CacheRebalanceMode.SYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.configuration.CacheConfiguration.DFLT_REBALANCE_BATCH_SIZE;
import static org.apache.ignite.configuration.DeploymentMode.CONTINUOUS;
/**
* Test node restart.
*/
/**
 * Test node restart while rebalancing (SYNC / ASYNC / NONE preload modes):
 * data written before a restart must still be readable afterwards.
 */
public abstract class GridCachePreloadRestartAbstractSelfTest extends GridCommonAbstractTest {
    /** Flag for debug output. */
    private static final boolean DEBUG = false;

    /** Cache name. */
    private static final String CACHE_NAME = "TEST_CACHE";

    /** Test timeout (5 minutes). */
    private static final long TEST_TIMEOUT = 5 * 60 * 1000;

    /** Default backups. */
    private static final int DFLT_BACKUPS = 1;

    /** Partitions. */
    private static final int DFLT_PARTITIONS = 521;

    /** Preload batch size. */
    private static final int DFLT_BATCH_SIZE = DFLT_REBALANCE_BATCH_SIZE;

    /** Number of key backups. Each test method can set this value as required. */
    private int backups = DFLT_BACKUPS;

    /** Default node count. */
    private static final int DFLT_NODE_CNT = 4;

    /** Default key count. */
    private static final int DFLT_KEY_CNT = 100;

    /** Default retries. */
    private static final int DFLT_RETRIES = 2;

    /** Index of the grid restarted by checkRestart(); first started grid. */
    private static volatile int idx = -1;

    /** Preload mode. */
    private CacheRebalanceMode preloadMode = ASYNC;

    /** Preload batch size. */
    private int preloadBatchSize = DFLT_BATCH_SIZE;

    /** Number of partitions. */
    private int partitions = DFLT_PARTITIONS;

    /** Node count. */
    private int nodeCnt = DFLT_NODE_CNT;

    /** Key count. */
    private int keyCnt = DFLT_KEY_CNT;

    /** Retries. */
    private int retries = DFLT_RETRIES;

    /** Shared IP finder. */
    private static final TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(gridName);

        // Discovery.
        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(ipFinder);

        c.setDiscoverySpi(disco);

        c.setDeploymentMode(CONTINUOUS);

        // Cache.
        CacheConfiguration cc = defaultCacheConfiguration();

        cc.setName(CACHE_NAME);
        cc.setCacheMode(PARTITIONED);
        cc.setWriteSynchronizationMode(FULL_SYNC);
        cc.setStartSize(20);
        cc.setRebalanceMode(preloadMode);
        cc.setRebalanceBatchSize(preloadBatchSize);
        cc.setAffinity(new RendezvousAffinityFunction(false, partitions));
        cc.setBackups(backups);
        cc.setAtomicityMode(TRANSACTIONAL);

        if (!nearEnabled())
            cc.setNearConfiguration(null);

        c.setCacheConfiguration(cc);

        return c;
    }

    /**
     * @return {@code True} if near cache is enabled.
     */
    protected abstract boolean nearEnabled();

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        // Reset all knobs to defaults so tests are order-independent.
        backups = DFLT_BACKUPS;
        partitions = DFLT_PARTITIONS;
        preloadMode = ASYNC;
        preloadBatchSize = DFLT_BATCH_SIZE;
        nodeCnt = DFLT_NODE_CNT;
        keyCnt = DFLT_KEY_CNT;
        retries = DFLT_RETRIES;
        idx = -1;

        // resetLog4j(Level.DEBUG, true,
        //     // Categories.
        //     GridDhtPreloader.class.getPackage().getName(),
        //     GridDhtPartitionTopologyImpl.class.getName(),
        //     GridDhtLocalPartition.class.getName());
    }

    /** {@inheritDoc} */
    @Override protected long getTestTimeout() {
        return TEST_TIMEOUT;
    }

    /**
     * Starts {@code nodeCnt} grids and records the first started index.
     *
     * @throws Exception If failed.
     */
    private void startGrids() throws Exception {
        for (int i = 0; i < nodeCnt; i++) {
            startGrid(i);

            if (idx < 0)
                idx = i;
        }
    }

    /**
     * @throws Exception If failed.
     */
    private void stopGrids() throws Exception {
        for (int i = 0; i < nodeCnt; i++)
            stopGrid(i);
    }

    /**
     * @throws Exception If failed.
     */
    public void testSyncPreloadRestart() throws Exception {
        preloadMode = SYNC;

        checkRestart();
    }

    /**
     * @throws Exception If failed.
     */
    public void testAsyncPreloadRestart() throws Exception {
        preloadMode = ASYNC;

        checkRestart();
    }

    /**
     * @throws Exception If failed.
     */
    public void testDisabledPreloadRestart() throws Exception {
        preloadMode = NONE;

        checkRestart();
    }

    /**
     * Dumps the primary/backup mapping of every key before a node stop.
     * Guarded by DEBUG for symmetry with affinityAfterStart() (previously it
     * always logged despite the "Flag for debug output" declaration).
     *
     * @param c Cache projection.
     */
    private void affinityBeforeStop(IgniteCache<Integer, String> c) {
        if (DEBUG) {
            for (int key = 0; key < keyCnt; key++) {
                int part = affinity(c).partition(key);

                info("Affinity nodes before stop [key=" + key + ", partition=" + part + ", nodes=" +
                    U.nodeIds(affinity(c).mapPartitionToPrimaryAndBackups(part)) + ']');
            }
        }
    }

    /**
     * Dumps the primary/backup mapping of every key after a node restart.
     *
     * @param c Cache projection.
     */
    private void affinityAfterStart(IgniteCache<Integer, String> c) {
        if (DEBUG) {
            for (int key = 0; key < keyCnt; key++) {
                int part = affinity(c).partition(key);

                info("Affinity nodes after start [key=" + key + ", partition=" + part + ", nodes=" +
                    U.nodeIds(affinity(c).mapPartitionToPrimaryAndBackups(part)) + ']');
            }
        }
    }

    /**
     * Writes keys, restarts one node, and verifies all keys survive - repeated
     * {@code retries} times.
     *
     * @throws Exception If failed.
     */
    private void checkRestart() throws Exception {
        info("*** STARTING TEST ***");

        startGrids();

        try {
            IgniteCache<Integer, String> c = grid(idx).cache(CACHE_NAME);

            for (int j = 0; j < retries; j++) {
                for (int i = 0; i < keyCnt; i++)
                    c.put(i, Integer.toString(i));

                info("Stored items.");

                checkGet(c, j);

                info("Stopping node: " + idx);

                affinityBeforeStop(c);

                stopGrid(idx);

                info("Starting node: " + idx);

                Ignite ignite = startGrid(idx);

                c = ignite.cache(CACHE_NAME);

                affinityAfterStart(c);

                checkGet(c, j);
            }
        }
        finally {
            stopGrids();
        }
    }

    /**
     * Verifies every key still maps to its expected value.
     *
     * @param c Cache.
     * @param attempt Attempt.
     * @throws Exception If failed.
     */
    private void checkGet(IgniteCache<Integer, String> c, int attempt) throws Exception {
        for (int i = 0; i < keyCnt; i++) {
            String v = c.get(i);

            if (v == null) {
                printFailureDetails(c, i, attempt);

                fail("Value is null [key=" + i + ", attempt=" + attempt + "]");
            }

            if (!Integer.toString(i).equals(v)) {
                printFailureDetails(c, i, attempt);

                fail("Wrong value for key [key=" +
                    i + ", actual value=" + v + ", expected value=" + Integer.toString(i) + "]");
            }
        }

        info("Read items.");
    }

    /**
     * @param c Cache projection.
     * @param key Key.
     * @param attempt Attempt.
     */
    private void printFailureDetails(IgniteCache<Integer, String> c, int key, int attempt) {
        error("*** Failure details ***");
        error("Key: " + key);
        error("Partition: " + c.getConfiguration(CacheConfiguration.class).getAffinity().partition(key));
        error("Attempt: " + attempt);
        error("Node: " + c.unwrap(Ignite.class).cluster().localNode().id());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jdbc.store.file;
import javax.sql.DataSource;
import java.nio.ByteBuffer;
import java.sql.Blob;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import org.apache.activemq.artemis.jdbc.store.drivers.AbstractJDBCDriver;
import org.apache.activemq.artemis.jdbc.store.sql.SQLProvider;
import org.jboss.logging.Logger;
@SuppressWarnings("SynchronizeOnNonFinalField")
public class JDBCSequentialFileFactoryDriver extends AbstractJDBCDriver {
private static final Logger logger = Logger.getLogger(JDBCSequentialFileFactoryDriver.class);
protected PreparedStatement deleteFile;
protected PreparedStatement createFile;
protected PreparedStatement selectFileByFileName;
protected PreparedStatement copyFileRecord;
protected PreparedStatement renameFile;
protected PreparedStatement readLargeObject;
protected PreparedStatement appendToLargeObject;
protected PreparedStatement selectFileNamesByExtension;
JDBCSequentialFileFactoryDriver() {
super();
}
JDBCSequentialFileFactoryDriver(DataSource dataSource, SQLProvider provider) {
super(dataSource, provider);
}
JDBCSequentialFileFactoryDriver(Connection connection, SQLProvider sqlProvider) {
super(connection, sqlProvider);
}
@Override
protected void createSchema() throws SQLException {
createTable(sqlProvider.getCreateFileTableSQL());
}
@Override
protected void prepareStatements() throws SQLException {
this.deleteFile = connection.prepareStatement(sqlProvider.getDeleteFileSQL());
this.createFile = connection.prepareStatement(sqlProvider.getInsertFileSQL(), new String[] {"ID"});
this.selectFileByFileName = connection.prepareStatement(sqlProvider.getSelectFileByFileName());
this.copyFileRecord = connection.prepareStatement(sqlProvider.getCopyFileRecordByIdSQL());
this.renameFile = connection.prepareStatement(sqlProvider.getUpdateFileNameByIdSQL());
this.readLargeObject = connection.prepareStatement(sqlProvider.getReadLargeObjectSQL());
this.appendToLargeObject = connection.prepareStatement(sqlProvider.getAppendToLargeObjectSQL(), ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE);
this.selectFileNamesByExtension = connection.prepareStatement(sqlProvider.getSelectFileNamesByExtensionSQL());
}
public List<String> listFiles(String extension) throws Exception {
synchronized (connection) {
List<String> fileNames = new ArrayList<>();
try {
connection.setAutoCommit(false);
selectFileNamesByExtension.setString(1, extension);
try (ResultSet rs = selectFileNamesByExtension.executeQuery()) {
while (rs.next()) {
fileNames.add(rs.getString(1));
}
}
connection.commit();
} catch (SQLException e) {
connection.rollback();
throw e;
}
return fileNames;
}
}
/**
* Opens the supplied file. If the file does not exist in the database it will create a new one.
*
* @param file
* @throws SQLException
*/
public void openFile(JDBCSequentialFile file) throws SQLException {
final long fileId = fileExists(file);
if (fileId < 0) {
createFile(file);
} else {
file.setId(fileId);
loadFile(file);
}
}
/**
* Checks to see if a file with filename and extension exists. If so returns the ID of the file or returns -1.
*
* @param file
* @return
* @throws SQLException
*/
public long fileExists(JDBCSequentialFile file) throws SQLException {
try {
synchronized (connection) {
connection.setAutoCommit(false);
selectFileByFileName.setString(1, file.getFileName());
try (ResultSet rs = selectFileByFileName.executeQuery()) {
final long id = rs.next() ? rs.getLong(1) : -1;
connection.commit();
return id;
} catch (Exception e) {
connection.rollback();
throw e;
}
}
} catch (NullPointerException npe) {
npe.printStackTrace();
throw npe;
}
}
/**
* Loads an existing file.
*
* @param file
* @throws SQLException
*/
public void loadFile(JDBCSequentialFile file) throws SQLException {
synchronized (connection) {
connection.setAutoCommit(false);
readLargeObject.setLong(1, file.getId());
try (ResultSet rs = readLargeObject.executeQuery()) {
if (rs.next()) {
Blob blob = rs.getBlob(1);
if (blob != null) {
file.setWritePosition((int) blob.length());
} else {
logger.warn("ERROR NO BLOB FOR FILE" + "File: " + file.getFileName() + " " + file.getId());
}
}
connection.commit();
} catch (SQLException e) {
connection.rollback();
throw e;
}
}
}
/**
* Creates a new database row representing the supplied file.
*
* @param file
* @throws SQLException
*/
public void createFile(JDBCSequentialFile file) throws SQLException {
synchronized (connection) {
try {
connection.setAutoCommit(false);
createFile.setString(1, file.getFileName());
createFile.setString(2, file.getExtension());
createFile.setBytes(3, new byte[0]);
createFile.executeUpdate();
try (ResultSet keys = createFile.getGeneratedKeys()) {
keys.next();
file.setId(keys.getLong(1));
}
connection.commit();
} catch (SQLException e) {
connection.rollback();
throw e;
}
}
}
/**
* Updates the fileName field to the new value.
*
* @param file
* @param newFileName
* @throws SQLException
*/
public void renameFile(JDBCSequentialFile file, String newFileName) throws SQLException {
synchronized (connection) {
try {
connection.setAutoCommit(false);
renameFile.setString(1, newFileName);
renameFile.setLong(2, file.getId());
renameFile.executeUpdate();
connection.commit();
} catch (SQLException e) {
connection.rollback();
throw e;
}
}
}
/**
* Deletes the associated row in the database.
*
* @param file
* @throws SQLException
*/
public void deleteFile(JDBCSequentialFile file) throws SQLException {
synchronized (connection) {
try {
connection.setAutoCommit(false);
deleteFile.setLong(1, file.getId());
deleteFile.executeUpdate();
connection.commit();
} catch (SQLException e) {
connection.rollback();
throw e;
}
}
}
/**
* Persists data to this files associated database mapping.
*
* @param file
* @param data
* @return
* @throws SQLException
*/
public int writeToFile(JDBCSequentialFile file, byte[] data) throws SQLException {
synchronized (connection) {
connection.setAutoCommit(false);
appendToLargeObject.setLong(1, file.getId());
int bytesWritten = 0;
try (ResultSet rs = appendToLargeObject.executeQuery()) {
if (rs.next()) {
Blob blob = rs.getBlob(1);
if (blob == null) {
blob = connection.createBlob();
}
bytesWritten = blob.setBytes(blob.length() + 1, data);
rs.updateBlob(1, blob);
rs.updateRow();
}
connection.commit();
return bytesWritten;
} catch (SQLException e) {
connection.rollback();
throw e;
}
}
}
/**
* Reads data from the file (at file.readPosition) into the byteBuffer.
*
* @param file
* @param bytes
* @return
* @throws SQLException
*/
public int readFromFile(JDBCSequentialFile file, ByteBuffer bytes) throws SQLException {
synchronized (connection) {
connection.setAutoCommit(false);
readLargeObject.setLong(1, file.getId());
int readLength = 0;
try (ResultSet rs = readLargeObject.executeQuery()) {
if (rs.next()) {
final Blob blob = rs.getBlob(1);
if (blob != null) {
readLength = (int) calculateReadLength(blob.length(), bytes.remaining(), file.position());
byte[] data = blob.getBytes(file.position() + 1, readLength);
bytes.put(data);
}
}
connection.commit();
return readLength;
} catch (Throwable e) {
throw e;
} finally {
connection.rollback();
}
}
}
/**
* Copy the data content of FileFrom to FileTo
*
* @param fileFrom
* @param fileTo
* @throws SQLException
*/
public void copyFileData(JDBCSequentialFile fileFrom, JDBCSequentialFile fileTo) throws SQLException {
synchronized (connection) {
try {
connection.setAutoCommit(false);
copyFileRecord.setLong(1, fileFrom.getId());
copyFileRecord.setLong(2, fileTo.getId());
copyFileRecord.executeUpdate();
connection.commit();
} catch (SQLException e) {
connection.rollback();
throw e;
}
}
}
/**
 * Drops the backing file table, destroying all tables and data.
 *
 * @throws SQLException if the drop fails (after rollback)
 */
@Override
public void destroy() throws SQLException {
   synchronized (connection) {
      try {
         connection.setAutoCommit(false);
         final String dropTableSql = sqlProvider.getDropFileTableSQL();
         try (Statement statement = connection.createStatement()) {
            statement.executeUpdate(dropTableSql);
         }
         connection.commit();
      } catch (SQLException sqlEx) {
         connection.rollback();
         throw sqlEx;
      }
   }
}
/**
 * Computes how many bytes can be read from an object of the given length,
 * starting at {@code readPosition}, into a buffer with {@code bufferSpace}
 * bytes available.
 *
 * @param objectLength total length of the stored object in bytes
 * @param bufferSpace free space available in the destination buffer
 * @param readPosition current read offset within the object
 * @return the readable byte count; never negative, never above {@code bufferSpace}
 */
public long calculateReadLength(long objectLength, int bufferSpace, long readPosition) {
   // Clamp at zero: the original returned a negative value when readPosition
   // was past the end, which callers cast to int and passed to Blob.getBytes.
   return Math.max(0L, Math.min(objectLength - readPosition, bufferSpace));
}
/**
 * @return the maximum Blob size supported by the configured SQL provider
 */
public long getMaxSize() {
return sqlProvider.getMaxBlobSize();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.persistence.graph.serialization.impl.shard.impl;
import java.util.Collections;
import java.util.Iterator;
import java.util.concurrent.ExecutionException;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.apache.usergrid.persistence.core.consistency.TimeService;
import org.apache.usergrid.persistence.core.executor.TaskExecutorFactory;
import org.apache.usergrid.persistence.core.scope.ApplicationScopeImpl;
import org.apache.usergrid.persistence.graph.MarkedEdge;
import org.apache.usergrid.persistence.graph.impl.SimpleMarkedEdge;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.AsyncTaskExecutor;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.DirectedEdgeMeta;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.EdgeShardSerialization;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.Shard;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.ShardEntryGroup;
import org.apache.usergrid.persistence.graph.serialization.impl.shard.ShardGroupDeletion;
import org.apache.usergrid.persistence.model.entity.Id;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import static org.apache.usergrid.persistence.core.util.IdGenerator.createId;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link ShardGroupDeletionImpl}. Each test drives
 * {@code maybeDeleteShard} into a different decision branch: pending
 * compaction, min shard, too-new shard, shards still containing edges,
 * and the successful delete path.
 */
@Ignore("Pending re-enable of delete functionality")
public class ShardGroupDeletionImplTest {

    protected AsyncTaskExecutor asyncTaskExecutor;

    protected ListeningExecutorService listeningExecutorService;

    private ApplicationScopeImpl scope;


    @Before
    public void setup() {
        this.scope = new ApplicationScopeImpl( createId( "application" ) );
    }


    @After
    public void shutDown() {
        // Guard against an NPE when a test fails before initExecutor() has
        // run, which would otherwise mask the original assertion failure.
        if ( listeningExecutorService != null ) {
            listeningExecutorService.shutdownNow();
        }
    }


    @Test
    public void shardCannotBeCompacted() throws ExecutionException, InterruptedException {

        final long createTime = 10000;
        final long currentTime = createTime;

        final Shard shard0 = new Shard( 0, createTime, true );
        final Shard shard1 = new Shard( 1000, createTime, false );

        //set a 1 delta for testing
        final ShardEntryGroup group = new ShardEntryGroup( 1 );
        group.addShard( shard1 );
        group.addShard( shard0 );

        assertTrue( "this should return true for our test to succeed", group.isCompactionPending() );

        final EdgeShardSerialization edgeShardSerialization = mock( EdgeShardSerialization.class );

        final TimeService timeService = mock( TimeService.class );
        when( timeService.getCurrentTime() ).thenReturn( currentTime );

        initExecutor( 1, 1 );

        final ShardGroupDeletionImpl shardGroupDeletion =
            new ShardGroupDeletionImpl( asyncTaskExecutor, edgeShardSerialization, timeService );

        final DirectedEdgeMeta directedEdgeMeta = getDirectedEdgeMeta();

        final ListenableFuture<ShardGroupDeletion.DeleteResult> future =
            shardGroupDeletion.maybeDeleteShard( this.scope, directedEdgeMeta, group, Collections.emptyIterator() );

        final ShardGroupDeletion.DeleteResult result = future.get();

        assertEquals( "should not delete with pending compaction", ShardGroupDeletion.DeleteResult.COMPACTION_PENDING,
            result );
    }


    @Test
    public void shardIsMinShard() throws ExecutionException, InterruptedException {

        final long currentTime = 1000;

        final Shard shard0 = Shard.MIN_SHARD;

        //set a 1 delta for testing
        final ShardEntryGroup group = new ShardEntryGroup( 1 );
        group.addShard( shard0 );

        // NOTE: the message previously said "false" although the assertion expects true
        assertTrue( "this should return true for our test to succeed", shard0.isMinShard() );

        final EdgeShardSerialization edgeShardSerialization = mock( EdgeShardSerialization.class );

        final TimeService timeService = mock( TimeService.class );
        when( timeService.getCurrentTime() ).thenReturn( currentTime );

        initExecutor( 1, 1 );

        final ShardGroupDeletionImpl shardGroupDeletion =
            new ShardGroupDeletionImpl( asyncTaskExecutor, edgeShardSerialization, timeService );

        final DirectedEdgeMeta directedEdgeMeta = getDirectedEdgeMeta();

        final ListenableFuture<ShardGroupDeletion.DeleteResult> future =
            shardGroupDeletion.maybeDeleteShard( this.scope, directedEdgeMeta, group, Collections.emptyIterator() );

        final ShardGroupDeletion.DeleteResult result = future.get();

        assertEquals( "should not delete min shard", ShardGroupDeletion.DeleteResult.NO_OP, result );
    }


    @Test
    public void shardTooNew() throws ExecutionException, InterruptedException {

        final long createTime = 10000;
        final long currentTime = createTime;

        final Shard shard0 = new Shard( 0, createTime, true );

        ////set a delta for way in the future
        final ShardEntryGroup group = new ShardEntryGroup( 1 );
        group.addShard( shard0 );

        assertFalse( "this should return false for our test to succeed", group.isCompactionPending() );
        assertTrue( "this should return true for our test to succeed", group.isNew( currentTime ) );

        final EdgeShardSerialization edgeShardSerialization = mock( EdgeShardSerialization.class );

        final TimeService timeService = mock( TimeService.class );
        when( timeService.getCurrentTime() ).thenReturn( currentTime );

        initExecutor( 1, 1 );

        final ShardGroupDeletionImpl shardGroupDeletion =
            new ShardGroupDeletionImpl( asyncTaskExecutor, edgeShardSerialization, timeService );

        final DirectedEdgeMeta directedEdgeMeta = getDirectedEdgeMeta();

        final ListenableFuture<ShardGroupDeletion.DeleteResult> future =
            shardGroupDeletion.maybeDeleteShard( this.scope, directedEdgeMeta, group, Collections.emptyIterator() );

        final ShardGroupDeletion.DeleteResult result = future.get();

        assertEquals( "should not delete within timeout period", ShardGroupDeletion.DeleteResult.TOO_NEW, result );
    }


    @Test
    public void hasEdges() throws ExecutionException, InterruptedException {

        final long createTime = 10000;
        final long currentTime = createTime * 2;

        final Shard shard0 = new Shard( 0, createTime, true );

        ////set a delta for way in the future
        final ShardEntryGroup group = new ShardEntryGroup( 1 );
        group.addShard( shard0 );

        assertFalse( "this should return false for our test to succeed", group.isCompactionPending() );
        assertFalse( "this should return false for our test to succeed", group.isNew( currentTime ) );

        final EdgeShardSerialization edgeShardSerialization = mock( EdgeShardSerialization.class );

        final TimeService timeService = mock( TimeService.class );
        when( timeService.getCurrentTime() ).thenReturn( currentTime );

        initExecutor( 1, 1 );

        final ShardGroupDeletionImpl shardGroupDeletion =
            new ShardGroupDeletionImpl( asyncTaskExecutor, edgeShardSerialization, timeService );

        final DirectedEdgeMeta directedEdgeMeta = getDirectedEdgeMeta();

        final Iterator<MarkedEdge> notMarkedIterator = Collections.singleton(
            ( MarkedEdge ) new SimpleMarkedEdge( createId( "source" ), "type", createId( "target" ), 1000, false ) )
            .iterator();

        final ListenableFuture<ShardGroupDeletion.DeleteResult> future =
            shardGroupDeletion.maybeDeleteShard( this.scope, directedEdgeMeta, group, notMarkedIterator );

        final ShardGroupDeletion.DeleteResult result = future.get();

        assertEquals( "should not delete with edges", ShardGroupDeletion.DeleteResult.CONTAINS_EDGES, result );

        //now check when marked we also retain them
        final Iterator<MarkedEdge> markedEdgeIterator = Collections.singleton(
            ( MarkedEdge ) new SimpleMarkedEdge( createId( "source" ), "type", createId( "target" ), 1000, true ) )
            .iterator();

        final ListenableFuture<ShardGroupDeletion.DeleteResult> markedFuture =
            shardGroupDeletion.maybeDeleteShard( this.scope, directedEdgeMeta, group, markedEdgeIterator );

        // BUGFIX: previously read future.get() again, so the marked-edge
        // branch was never actually asserted.
        final ShardGroupDeletion.DeleteResult markedResult = markedFuture.get();

        assertEquals( "should not delete with edges", ShardGroupDeletion.DeleteResult.CONTAINS_EDGES, markedResult );
    }


    @Test
    public void testDeletion() throws ExecutionException, InterruptedException, ConnectionException {

        final long createTime = 10000;
        final long currentTime = createTime * 2;

        final Shard shard0 = new Shard( 1000, createTime, true );

        ////set a delta for way in the future
        final ShardEntryGroup group = new ShardEntryGroup( 1 );
        group.addShard( shard0 );

        assertFalse( "this should return false for our test to succeed", group.isCompactionPending() );
        assertFalse( "this should return false for our test to succeed", group.isNew( currentTime ) );

        final DirectedEdgeMeta directedEdgeMeta = getDirectedEdgeMeta();

        //mock up returning a mutation
        final EdgeShardSerialization edgeShardSerialization = mock( EdgeShardSerialization.class );

        final MutationBatch batch = mock( MutationBatch.class );

        when( edgeShardSerialization.removeShardMeta( same( scope ), same( shard0 ), same( directedEdgeMeta ) ) )
            .thenReturn( batch );

        final TimeService timeService = mock( TimeService.class );
        when( timeService.getCurrentTime() ).thenReturn( currentTime );

        initExecutor( 1, 1 );

        final ShardGroupDeletionImpl shardGroupDeletion =
            new ShardGroupDeletionImpl( asyncTaskExecutor, edgeShardSerialization, timeService );

        final ListenableFuture<ShardGroupDeletion.DeleteResult> future =
            shardGroupDeletion.maybeDeleteShard( this.scope, directedEdgeMeta, group, Collections.emptyIterator() );

        final ShardGroupDeletion.DeleteResult result = future.get();

        assertEquals( "should delete", ShardGroupDeletion.DeleteResult.DELETED, result );

        // the mutation returned by the serialization must have been executed
        verify( batch ).execute();
    }


    /** Builds a simple source-node edge meta shared by all tests. */
    private DirectedEdgeMeta getDirectedEdgeMeta() {

        final Id sourceId = createId( "source" );
        final String edgeType = "test";

        final DirectedEdgeMeta directedEdgeMeta = DirectedEdgeMeta.fromSourceNode( sourceId, edgeType );

        return directedEdgeMeta;
    }


    /** Creates the executor service and a mock AsyncTaskExecutor wrapping it. */
    private void initExecutor( final int numberThreads, final int queueLength ) {

        listeningExecutorService = MoreExecutors.listeningDecorator( TaskExecutorFactory
            .createTaskExecutor( "GraphTaskExecutor", numberThreads, queueLength,
                TaskExecutorFactory.RejectionAction.ABORT ) );

        asyncTaskExecutor = mock( AsyncTaskExecutor.class );
        when( asyncTaskExecutor.getExecutorService() ).thenReturn( listeningExecutorService );
    }
}
| |
/*
* Copyright 2013 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser.types;
import gw.lang.reflect.IAnnotationInfo;
import gw.lang.reflect.IConstructorInfo;
import gw.lang.reflect.IHasParameterInfos;
import gw.lang.reflect.IMethodInfo;
import gw.lang.reflect.INonLoadableType;
import gw.lang.reflect.IParameterInfo;
import gw.lang.reflect.IPropertyAccessor;
import gw.lang.reflect.IPropertyInfo;
import gw.lang.reflect.IType;
import gw.lang.reflect.ITypeInfo;
import gw.lang.reflect.ITypeLoader;
import gw.lang.reflect.MethodList;
import gw.lang.reflect.PropertyInfoBase;
import gw.lang.reflect.TypeBase;
import gw.lang.reflect.TypeInfoBase;
import gw.lang.reflect.java.JavaTypes;
import gw.util.concurrent.LockingLazyVar;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
/**
 * A non-loadable synthetic type representing a reference to a specific method
 * or constructor (a "function literal"). The wrapped feature supplies the
 * parameter information; the supplied super type supplies methods,
 * constructors, and base properties.
 */
public class FunctionLiteralType extends TypeBase implements IType, INonLoadableType
{
  // Type info is built lazily, at most once, in a thread-safe manner.
  private LockingLazyVar<ITypeInfo> _typeInfo = new LockingLazyVar<ITypeInfo>()
  {
    @Override
    protected ITypeInfo init()
    {
      return makeTypeInfo();
    }
  };
  private IHasParameterInfos _feature;
  private IType _superType;
  private Class _clazz;

  /**
   * Resolves the referenced feature on {@code referenceType} and wraps it in a
   * FunctionLiteralType. A null {@code name} selects a constructor; otherwise
   * the method with the given name and parameter types is selected.
   */
  public static FunctionLiteralType resolve( Class clazz, IType superType, IType referenceType, CharSequence name, IType[] parameters )
  {
    IHasParameterInfos feature;
    if( name == null )
    {
      feature = referenceType.getTypeInfo().getConstructor( parameters );
    }
    else
    {
      feature = referenceType.getTypeInfo().getMethod( name, parameters );
    }
    return new FunctionLiteralType( clazz, superType, feature );
  }

  public Class getSerializationClass() {
    return _clazz;
  }

  public IType getSerializationSuperType() {
    return _superType;
  }

  public IType getSerializationReferenceType() {
    return _feature.getOwnersType();
  }

  /**
   * @return the display name for method features, or null for constructors
   */
  public String getSerializationName()
  {
    if( _feature instanceof IMethodInfo )
    {
      return _feature.getDisplayName();
    }
    else
    {
      return null;
    }
  }

  /**
   * @return the parameter types of the referenced feature
   */
  public IType[] getSerializationParameters()
  {
    List<IType> paramTypes = new ArrayList<IType>();
    for( IParameterInfo pi : _feature.getParameters() )
    {
      paramTypes.add( pi.getFeatureType() );
    }
    return paramTypes.toArray( new IType[paramTypes.size()] );
  }

  public FunctionLiteralType( Class clazz, IType rawType, IHasParameterInfos feature )
  {
    super( clazz );
    _clazz = clazz;
    _feature = feature;
    _superType = rawType;
  }

  @Override
  public String getName()
  {
    return _feature.getName();
  }

  @Override
  public String getRelativeName()
  {
    return _feature.getName();
  }

  @Override
  public String getNamespace()
  {
    // Function literal types live outside any namespace.
    return null;
  }

  @Override
  public ITypeLoader getTypeLoader()
  {
    return _superType.getTypeLoader();
  }

  @Override
  public IType getSupertype()
  {
    return _superType;
  }

  @Override
  public IType[] getInterfaces()
  {
    return _superType.getInterfaces();
  }

  @Override
  public ITypeInfo getTypeInfo()
  {
    return _typeInfo.get();
  }

  protected Set<? extends IType> loadAllTypesInHierarchy() {
    return getAllClassesInClassHierarchyAsIntrinsicTypes( this );
  }

  public boolean isAssignableFrom(IType type) {
    return type.getAllTypesInHierarchy().contains( this );
  }

  private ITypeInfo makeTypeInfo()
  {
    return new FunctionLiteralTypeInfo();
  }

  /**
   * Type info combining the super type's members with one synthetic property
   * per parameter of the referenced feature.
   */
  private class FunctionLiteralTypeInfo extends TypeInfoBase implements ITypeInfo
  {
    private LockingLazyVar<List<? extends IPropertyInfo>> _properties = new LockingLazyVar<List<? extends IPropertyInfo>>()
    {
      @Override
      protected List<? extends IPropertyInfo> init()
      {
        // Was a raw ArrayList; parameterize it to avoid an unchecked warning.
        List<IPropertyInfo> properties = new ArrayList<IPropertyInfo>( _superType.getTypeInfo().getProperties() );
        IParameterInfo[] parameters = _feature.getParameters();
        for( IParameterInfo parameter : parameters )
        {
          properties.add( new FunctionLiteralParameterProperty( FunctionLiteralTypeInfo.this, parameter ) );
        }
        return properties;
      }
    };

    @Override
    public List<? extends IPropertyInfo> getProperties()
    {
      return _properties.get();
    }

    @Override
    public IPropertyInfo getProperty( CharSequence propName )
    {
      for( IPropertyInfo propertyInfo : getProperties() )
      {
        // Was getName().substring( 0 ), a no-op call that only obscured intent.
        if( propertyInfo.getName().equals( propName ) )
        {
          return propertyInfo;
        }
      }
      return null;
    }

    @Override
    public MethodList getMethods()
    {
      return _superType.getTypeInfo().getMethods();
    }

    @Override
    public List<? extends IConstructorInfo> getConstructors()
    {
      return _superType.getTypeInfo().getConstructors();
    }

    @Override
    public List<IAnnotationInfo> getDeclaredAnnotations()
    {
      return Collections.emptyList();
    }

    @Override
    public IType getOwnersType()
    {
      return FunctionLiteralType.this;
    }
  }

  /**
   * A read-only synthetic property ("$name") exposing one parameter of the
   * referenced feature. Its value is the IParameterInfo itself.
   */
  private class FunctionLiteralParameterProperty extends PropertyInfoBase implements IPropertyInfo
  {
    private String _name;
    private IParameterInfo _parameter;

    public FunctionLiteralParameterProperty( ITypeInfo container, IParameterInfo parameter )
    {
      super( container );
      _name = "$" + parameter.getName();
      _parameter = parameter;
    }

    @Override
    public boolean isReadable()
    {
      return true;
    }

    @Override
    public boolean isWritable( IType whosAskin )
    {
      return false;
    }

    @Override
    public IPropertyAccessor getAccessor()
    {
      return new IPropertyAccessor()
      {
        @Override
        public Object getValue( Object ctx )
        {
          return _parameter;
        }

        @Override
        public void setValue( Object ctx, Object value )
        {
          throw new UnsupportedOperationException( "Can't set property value!" );
        }
      };
    }

    @Override
    public List<IAnnotationInfo> getDeclaredAnnotations()
    {
      return Collections.emptyList();
    }

    @Override
    public String getName()
    {
      return _name;
    }

    @Override
    public IType getFeatureType()
    {
      return JavaTypes.getGosuType(IParameterInfo.class);
    }
  }
}
| |
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.util.stream.Collectors.joining;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.PlatformConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.platform.DeclaredToolchainInfo;
import com.google.devtools.build.lib.analysis.platform.PlatformProviderUtils;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelConstants;
import com.google.devtools.build.lib.cmdline.TargetParsingException;
import com.google.devtools.build.lib.packages.Package;
import com.google.devtools.build.lib.pkgcache.FilteringPolicies;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyFunctionException.Transience;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.ValueOrException;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
/**
 * {@link SkyFunction} that returns all registered toolchains available for toolchain resolution.
 *
 * <p>Toolchains come from two sources: the {@code --extra_toolchains} flag (via
 * {@link PlatformConfiguration}) and {@code register_toolchains} calls in the WORKSPACE file.
 * Per Skyframe convention, a null return from {@link #compute} signals missing dependencies
 * and triggers a restart.
 */
public class RegisteredToolchainsFunction implements SkyFunction {

  @Nullable
  @Override
  public SkyValue compute(SkyKey skyKey, Environment env)
      throws SkyFunctionException, InterruptedException {

    BuildConfigurationValue buildConfigurationValue =
        (BuildConfigurationValue)
            env.getValue(((RegisteredToolchainsValue.Key) skyKey).getConfigurationKey());
    if (env.valuesMissing()) {
      return null;
    }
    BuildConfiguration configuration = buildConfigurationValue.getConfiguration();

    ImmutableList.Builder<String> targetPatternBuilder = new ImmutableList.Builder<>();

    // Get the toolchains from the configuration (--extra_toolchains).
    PlatformConfiguration platformConfiguration =
        configuration.getFragment(PlatformConfiguration.class);
    targetPatternBuilder.addAll(platformConfiguration.getExtraToolchains());

    // Get the registered toolchains from the WORKSPACE.
    targetPatternBuilder.addAll(getWorkspaceToolchains(env));
    if (env.valuesMissing()) {
      return null;
    }
    ImmutableList<String> targetPatterns = targetPatternBuilder.build();

    // Expand target patterns, keeping only "toolchain" rules.
    ImmutableList<Label> toolchainLabels;
    try {
      toolchainLabels =
          TargetPatternUtil.expandTargetPatterns(
              env, targetPatterns, FilteringPolicies.ruleType("toolchain", true));
      if (env.valuesMissing()) {
        return null;
      }
    } catch (TargetPatternUtil.InvalidTargetPatternException e) {
      throw new RegisteredToolchainsFunctionException(
          new InvalidToolchainLabelException(e), Transience.PERSISTENT);
    }

    // Load the configured target for each, and get the declared toolchain providers.
    ImmutableList<DeclaredToolchainInfo> registeredToolchains =
        configureRegisteredToolchains(env, configuration, toolchainLabels);
    if (env.valuesMissing()) {
      return null;
    }

    return RegisteredToolchainsValue.create(registeredToolchains);
  }

  /**
   * Returns the WORKSPACE-registered toolchain patterns, or an empty list when the external
   * package is not yet available.
   */
  private Iterable<? extends String> getWorkspaceToolchains(Environment env)
      throws InterruptedException {
    List<String> patterns = getRegisteredToolchains(env);
    if (patterns == null) {
      return ImmutableList.of();
    }
    return patterns;
  }

  /**
   * Loads the external package and then returns the registered toolchains.
   *
   * @param env the environment to use for lookups
   */
  @Nullable
  @VisibleForTesting
  public static List<String> getRegisteredToolchains(Environment env) throws InterruptedException {
    PackageValue externalPackageValue =
        (PackageValue) env.getValue(PackageValue.key(LabelConstants.EXTERNAL_PACKAGE_IDENTIFIER));
    if (externalPackageValue == null) {
      return null;
    }

    Package externalPackage = externalPackageValue.getPackage();
    return externalPackage.getRegisteredToolchains();
  }

  /**
   * Configures each toolchain label and extracts its {@link DeclaredToolchainInfo} provider.
   * Returns null when Skyframe dependencies are still missing.
   *
   * @throws RegisteredToolchainsFunctionException if a label is not a valid toolchain or uses
   *     configuration that is forbidden in retroactive trimming mode
   */
  @Nullable
  private ImmutableList<DeclaredToolchainInfo> configureRegisteredToolchains(
      Environment env,
      BuildConfiguration configuration,
      List<Label> labels)
      throws InterruptedException, RegisteredToolchainsFunctionException {
    ImmutableList<SkyKey> keys =
        labels.stream()
            .map(
                label ->
                    ConfiguredTargetKey.builder()
                        .setLabel(label)
                        .setConfiguration(configuration)
                        .build())
            .collect(toImmutableList());

    Map<SkyKey, ValueOrException<ConfiguredValueCreationException>> values =
        env.getValuesOrThrow(keys, ConfiguredValueCreationException.class);
    ImmutableList.Builder<DeclaredToolchainInfo> toolchains = new ImmutableList.Builder<>();
    boolean valuesMissing = false;
    for (SkyKey key : keys) {
      ConfiguredTargetKey configuredTargetKey = (ConfiguredTargetKey) key.argument();
      Label toolchainLabel = configuredTargetKey.getLabel();
      try {
        ValueOrException<ConfiguredValueCreationException> valueOrException = values.get(key);
        if (valueOrException.get() == null) {
          // Keep iterating so all exceptions are surfaced before the restart.
          valuesMissing = true;
          continue;
        }
        ConfiguredTarget target =
            ((ConfiguredTargetValue) valueOrException.get()).getConfiguredTarget();
        if (configuration.trimConfigurationsRetroactively()
            && !target.getConfigurationKey().getFragments().isEmpty()) {
          // No fragment may be present on a toolchain rule in retroactive trimming mode.
          // This is because trimming expects that platform and toolchain resolution uses only
          // the platform configuration. In theory, this means toolchains could use platforms, but
          // the current expectation is that toolchains should not use anything at all, so better
          // to go with the stricter expectation for now.
          String extraFragmentDescription =
              target.getConfigurationKey().getFragments().stream()
                  .map(Class::getSimpleName)
                  .collect(joining(","));
          throw new RegisteredToolchainsFunctionException(
              new InvalidToolchainLabelException(
                  toolchainLabel,
                  "this toolchain uses configuration, which is forbidden in retroactive trimming "
                      + "mode: "
                      + "extra fragments are ["
                      + extraFragmentDescription
                      + "]"),
              Transience.PERSISTENT);
        }
        DeclaredToolchainInfo toolchainInfo = PlatformProviderUtils.declaredToolchainInfo(target);
        if (toolchainInfo == null) {
          throw new RegisteredToolchainsFunctionException(
              new InvalidToolchainLabelException(toolchainLabel), Transience.PERSISTENT);
        }
        toolchains.add(toolchainInfo);
      } catch (ConfiguredValueCreationException e) {
        throw new RegisteredToolchainsFunctionException(
            new InvalidToolchainLabelException(toolchainLabel, e), Transience.PERSISTENT);
      }
    }

    if (valuesMissing) {
      return null;
    }
    return toolchains.build();
  }

  @Nullable
  @Override
  public String extractTag(SkyKey skyKey) {
    return null;
  }

  /**
   * Used to indicate that the given {@link Label} represents a {@link ConfiguredTarget} which is
   * not a valid {@link DeclaredToolchainInfo} provider.
   */
  public static final class InvalidToolchainLabelException extends ToolchainException {

    public InvalidToolchainLabelException(Label invalidLabel) {
      super(
          formatMessage(
              invalidLabel.getCanonicalForm(),
              "target does not provide the DeclaredToolchainInfo provider"));
    }

    public InvalidToolchainLabelException(Label invalidLabel, String reason) {
      super(formatMessage(invalidLabel.getCanonicalForm(), reason));
    }

    public InvalidToolchainLabelException(TargetPatternUtil.InvalidTargetPatternException e) {
      this(e.getInvalidPattern(), e.getTpe());
    }

    public InvalidToolchainLabelException(String invalidPattern, TargetParsingException e) {
      super(formatMessage(invalidPattern, e.getMessage()), e);
    }

    public InvalidToolchainLabelException(Label invalidLabel, ConfiguredValueCreationException e) {
      super(formatMessage(invalidLabel.getCanonicalForm(), e.getMessage()), e);
    }

    private static String formatMessage(String invalidPattern, String reason) {
      return String.format("invalid registered toolchain '%s': %s", invalidPattern, reason);
    }
  }

  /**
   * Used to declare all the exception types that can be wrapped in the exception thrown by {@link
   * #compute}.
   */
  public static class RegisteredToolchainsFunctionException extends SkyFunctionException {

    public RegisteredToolchainsFunctionException(
        InvalidToolchainLabelException cause, Transience transience) {
      super(cause, transience);
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode.snapshot;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.hdfs.server.datanode.BlockPoolSliceStorage;
import org.apache.hadoop.hdfs.server.datanode.BlockScanner;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DirectoryScanner;
import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.INode;
import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
import org.apache.hadoop.hdfs.server.namenode.INodeFile;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.ipc.ProtobufRpcEngine.Server;
import org.apache.hadoop.metrics2.impl.MetricsSystemImpl;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.junit.Assert;
/**
* Helper for writing snapshot related tests
*/
public class SnapshotTestHelper {
public static final Log LOG = LogFactory.getLog(SnapshotTestHelper.class);
/** Disable the logs that are not very useful for snapshot related tests. */
public static void disableLogs() {
// Loggers only reachable by class-name string (no accessible class or LOG field here).
final String[] lognames = {
"org.apache.hadoop.hdfs.server.datanode.BlockPoolSliceScanner",
"org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl",
"org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetAsyncDiskService",
};
for(String n : lognames) {
GenericTestUtils.disableLog(LogFactory.getLog(n));
}
// Loggers reachable via their classes or public static log fields.
GenericTestUtils.disableLog(LogFactory.getLog(UserGroupInformation.class));
GenericTestUtils.disableLog(LogFactory.getLog(BlockManager.class));
GenericTestUtils.disableLog(LogFactory.getLog(FSNamesystem.class));
GenericTestUtils.disableLog(LogFactory.getLog(DirectoryScanner.class));
GenericTestUtils.disableLog(LogFactory.getLog(MetricsSystemImpl.class));
GenericTestUtils.disableLog(BlockScanner.LOG);
GenericTestUtils.disableLog(HttpServer2.LOG);
GenericTestUtils.disableLog(DataNode.LOG);
GenericTestUtils.disableLog(BlockPoolSliceStorage.LOG);
GenericTestUtils.disableLog(LeaseManager.LOG);
GenericTestUtils.disableLog(NameNode.stateChangeLog);
GenericTestUtils.disableLog(NameNode.blockStateChangeLog);
GenericTestUtils.disableLog(DFSClient.LOG);
GenericTestUtils.disableLog(Server.LOG);
}
private SnapshotTestHelper() {
// Utility class: cannot be instantiated
}
/**
 * Returns the root path of the given snapshot, i.e.
 * {@code <snapshottedDir>/.snapshot/<snapshotName>}.
 */
public static Path getSnapshotRoot(Path snapshottedDir, String snapshotName) {
  final String relativeSnapshotPath =
      HdfsConstants.DOT_SNAPSHOT_DIR + "/" + snapshotName;
  return new Path(snapshottedDir, relativeSnapshotPath);
}
/**
 * Returns the path of {@code fileLocalName} inside the given snapshot of
 * {@code snapshottedDir}.
 */
public static Path getSnapshotPath(Path snapshottedDir, String snapshotName,
    String fileLocalName) {
  final Path snapshotRoot = getSnapshotRoot(snapshottedDir, snapshotName);
  return new Path(snapshotRoot, fileLocalName);
}
/**
 * Create snapshot for a dir using a given snapshot name
 *
 * @param hdfs DistributedFileSystem instance
 * @param snapshotRoot The dir to be snapshotted
 * @param snapshotName The name of the snapshot
 * @return The path of the snapshot root
 */
public static Path createSnapshot(DistributedFileSystem hdfs,
    Path snapshotRoot, String snapshotName) throws Exception {
  LOG.info("createSnapshot " + snapshotName + " for " + snapshotRoot);
  assertTrue(hdfs.exists(snapshotRoot));
  // Allow snapshots on the dir, then take one.
  hdfs.allowSnapshot(snapshotRoot);
  hdfs.createSnapshot(snapshotRoot, snapshotName);
  // set quota to a large value for testing counts
  final long largeQuota = Long.MAX_VALUE - 1;
  hdfs.setQuota(snapshotRoot, largeQuota, largeQuota);
  return getSnapshotRoot(snapshotRoot, snapshotName);
}
/**
 * Check the functionality of a snapshot.
 *
 * @param hdfs DistributedFileSystem instance
 * @param snapshotRoot The root of the snapshot
 * @param snapshottedDir The snapshotted directory
 */
public static void checkSnapshotCreation(DistributedFileSystem hdfs,
    Path snapshotRoot, Path snapshottedDir) throws Exception {
  // Currently we only check if the snapshot was created successfully
  assertTrue(hdfs.exists(snapshotRoot));
  // The snapshot must list the same number of entries as the original dir.
  final FileStatus[] dirEntries = hdfs.listStatus(snapshottedDir);
  final FileStatus[] snapshotEntries = hdfs.listStatus(snapshotRoot);
  assertEquals("snapshottedDir=" + snapshottedDir
      + ", snapshotRoot=" + snapshotRoot,
      dirEntries.length, snapshotEntries.length);
}
/**
 * Compare two dumped trees that are stored in two files. The following is an
 * example of the dumped tree:
 *
 * <pre>
 * information of root
 * +- the first child of root (e.g., /foo)
 *   +- the first child of /foo
 *   ...
 *   \- the last child of /foo (e.g., /foo/bar)
 *     +- the first child of /foo/bar
 *     ...
 *   snapshots of /foo
 *   +- snapshot s_1
 *   ...
 *   \- snapshot s_n
 * +- second child of root
 * ...
 * \- last child of root
 *
 * The following information is dumped for each inode:
 * localName (className@hashCode) parent permission group user
 *
 * Specific information for different types of INode:
 * {@link INodeDirectory}:childrenSize
 * {@link INodeFile}: fileSize, block list. Check {@link BlockInfo#toString()}
 * and {@link BlockInfoUnderConstruction#toString()} for detailed information.
 * {@link FileWithSnapshot}: next link
 * </pre>
 * @see INode#dumpTreeRecursively()
 */
public static void compareDumpedTreeInFile(File file1, File file2,
    boolean compareQuota) throws IOException {
  try {
    // First pass runs silently.
    compareDumpedTreeInFile(file1, file2, compareQuota, false);
  } catch (Throwable t) {
    // On mismatch: log the failure, then rerun with printing enabled so the
    // full line-by-line comparison appears in the test output.
    LOG.info("FAILED compareDumpedTreeInFile(" + file1 + ", " + file2 + ")", t);
    compareDumpedTreeInFile(file1, file2, compareQuota, true);
  }
}
/**
 * Line-by-line comparison of two dumped trees, after normalizing away
 * hashCodes, replica lists, and (optionally) quota information.
 *
 * @param print if true, print both files and each line pair as compared
 * @throws IOException if reading either file fails
 */
private static void compareDumpedTreeInFile(File file1, File file2,
    boolean compareQuota, boolean print) throws IOException {
  if (print) {
    printFile(file1);
    printFile(file2);
  }
  BufferedReader reader1 = new BufferedReader(new FileReader(file1));
  try {
    // Opened inside the outer try so reader1 cannot leak if this throws.
    BufferedReader reader2 = new BufferedReader(new FileReader(file2));
    try {
      String line1;
      String line2;
      while (true) {
        line1 = reader1.readLine();
        line2 = reader2.readLine();
        if (line1 == null || line2 == null) {
          break;
        }
        if (print) {
          System.out.println();
          System.out.println("1) " + line1);
          System.out.println("2) " + line2);
        }
        // skip the hashCode part of the object string during the comparison,
        // also ignore the difference between INodeFile/INodeFileWithSnapshot
        line1 = line1.replaceAll("INodeFileWithSnapshot", "INodeFile");
        line2 = line2.replaceAll("INodeFileWithSnapshot", "INodeFile");
        line1 = line1.replaceAll("@[\\dabcdef]+", "");
        line2 = line2.replaceAll("@[\\dabcdef]+", "");
        // skip the replica field of the last block of an
        // INodeFileUnderConstruction
        line1 = line1.replaceAll("replicas=\\[.*\\]", "replicas=[]");
        line2 = line2.replaceAll("replicas=\\[.*\\]", "replicas=[]");
        if (!compareQuota) {
          line1 = line1.replaceAll("Quota\\[.*\\]", "Quota[]");
          line2 = line2.replaceAll("Quota\\[.*\\]", "Quota[]");
        }
        // skip the specific fields of BlockInfoUnderConstruction when the node
        // is an INodeFileSnapshot or an INodeFileUnderConstructionSnapshot
        if (line1.contains("(INodeFileSnapshot)")
            || line1.contains("(INodeFileUnderConstructionSnapshot)")) {
          line1 = line1.replaceAll(
              "\\{blockUCState=\\w+, primaryNodeIndex=[-\\d]+, replicas=\\[\\]\\}",
              "");
          line2 = line2.replaceAll(
              "\\{blockUCState=\\w+, primaryNodeIndex=[-\\d]+, replicas=\\[\\]\\}",
              "");
        }
        assertEquals(line1, line2);
      }
      // Both files must end at the same line. The previous post-loop check
      // (assertNull(readerN.readLine())) silently accepted a single extra
      // trailing line in file1, because that line was consumed by the loop
      // but never compared. Checking the loop's own variables catches any
      // leftover line in either file.
      Assert.assertNull(line1);
      Assert.assertNull(line2);
    } finally {
      reader2.close();
    }
  } finally {
    reader1.close();
  }
}
/**
 * Print the full contents of {@code f} to System.out, preceded by a blank
 * line and a "File: ..." header.
 *
 * @param f the file to print
 * @throws IOException if the file cannot be read
 */
static void printFile(File f) throws IOException {
  System.out.println();
  System.out.println("File: " + f);
  final BufferedReader reader = new BufferedReader(new FileReader(f));
  try {
    String line = reader.readLine();
    while (line != null) {
      System.out.println(line);
      line = reader.readLine();
    }
  } finally {
    reader.close();
  }
}
/**
 * Dump the whole namespace tree rooted at "/" into file {@code f},
 * overwriting any existing content.
 *
 * @param fsdir the FSDirectory whose tree is dumped
 * @param f the output file (truncated before writing)
 * @throws IOException if the file cannot be written
 */
public static void dumpTree2File(FSDirectory fsdir, File f) throws IOException {
  final PrintWriter out = new PrintWriter(new FileWriter(f, false), true);
  try {
    fsdir.getINode("/").dumpTreeRecursively(out, new StringBuilder(),
        Snapshot.CURRENT_STATE_ID);
  } finally {
    // Always release the file handle, even if the dump throws midway;
    // the original leaked the writer on failure.
    out.close();
  }
}
/**
 * Generate the path for a snapshot file.
 *
 * @param snapshotRoot of format
 *          {@literal <snapshottable_dir>/.snapshot/<snapshot_name>}
 * @param file path to a file
 * @return The path of the snapshot of the file assuming the file has a
 *         snapshot under the snapshot root of format
 *         {@literal <snapshottable_dir>/.snapshot/<snapshot_name>/<path_to_file_inside_snapshot>}
 *         . Null if the file is not under the directory associated with the
 *         snapshot root.
 */
static Path getSnapshotFile(Path snapshotRoot, Path file) {
  Path rootParent = snapshotRoot.getParent();
  if (rootParent != null && rootParent.getName().equals(".snapshot")) {
    Path snapshotDir = rootParent.getParent();
    String dirStr = snapshotDir.toString();
    String fileStr = file.toString();
    // Require a true path-prefix match. The previous contains() test also
    // matched unrelated paths (e.g. file /a/dir/f against snapshot dir /dir),
    // which then produced a bogus substring-based result.
    String prefix = dirStr.endsWith("/") ? dirStr : dirStr + "/";
    if (fileStr.startsWith(prefix) && fileStr.length() > prefix.length()) {
      String fileName = fileStr.substring(prefix.length());
      return new Path(snapshotRoot, fileName);
    }
  }
  return null;
}
/**
 * A class creating directories trees for snapshot testing. For simplicity,
 * the directory tree is a binary tree, i.e., each directory has two children
 * as snapshottable directories.
 */
static class TestDirectoryTree {
  /** Height of the directory tree */
  final int height;
  /** Top node of the directory tree */
  final Node topNode;
  /** A map recording nodes for each tree level */
  final Map<Integer, ArrayList<Node>> levelMap;
  /**
   * Constructor to build a tree of given {@code height}.
   * The top node is created at path "/TestSnapshot" (level 0), registered in
   * {@link #levelMap}, and then children are generated recursively.
   */
  TestDirectoryTree(int height, FileSystem fs) throws Exception {
    this.height = height;
    this.topNode = new Node(new Path("/TestSnapshot"), 0,
        null, fs);
    this.levelMap = new HashMap<Integer, ArrayList<Node>>();
    addDirNode(topNode, 0);
    genChildren(topNode, height - 1, fs);
  }
  /**
   * Add a node into the levelMap, lazily creating the per-level list.
   */
  private void addDirNode(Node node, int atLevel) {
    ArrayList<Node> list = levelMap.get(atLevel);
    if (list == null) {
      list = new ArrayList<Node>();
      levelMap.put(atLevel, list);
    }
    list.add(node);
  }
  // Monotonically increasing counter used to give every generated child
  // directory a unique name suffix ("left<id>"/"right<id>"). The exact
  // names depend on the pre-increment order during the recursive
  // construction below, so this sequencing must not be changed.
  int id = 0;
  /**
   * Recursively generate the tree based on the height.
   *
   * @param parent The parent node
   * @param level The remaining levels to generate
   * @param fs The FileSystem where to generate the files/dirs
   * @throws Exception
   */
  private void genChildren(Node parent, int level, FileSystem fs)
      throws Exception {
    if (level == 0) {
      return;
    }
    // Children live at tree level (height - level); each ++id consumes the
    // next unique suffix before the Node (and its directory) is created.
    parent.leftChild = new Node(new Path(parent.nodePath,
        "left" + ++id), height - level, parent, fs);
    parent.rightChild = new Node(new Path(parent.nodePath,
        "right" + ++id), height - level, parent, fs);
    addDirNode(parent.leftChild, parent.leftChild.level);
    addDirNode(parent.rightChild, parent.rightChild.level);
    genChildren(parent.leftChild, level - 1, fs);
    genChildren(parent.rightChild, level - 1, fs);
  }
  /**
   * Randomly retrieve a node from the directory tree.
   *
   * @param random A random instance passed by user.
   * @param excludedList Excluded list, i.e., the randomly generated node
   *          cannot be one of the nodes in this list.
   * @return a random node from the tree.
   */
  // NOTE(review): loops until a non-excluded node is drawn; if every node of
  // some level were excluded this could retry indefinitely — callers are
  // expected to exclude only a few nodes.
  Node getRandomDirNode(Random random, List<Node> excludedList) {
    while (true) {
      int level = random.nextInt(height);
      ArrayList<Node> levelList = levelMap.get(level);
      int index = random.nextInt(levelList.size());
      Node randomNode = levelList.get(index);
      if (excludedList == null || !excludedList.contains(randomNode)) {
        return randomNode;
      }
    }
  }
  /**
   * The class representing a node in {@link TestDirectoryTree}.
   * <br>
   * This contains:
   * <ul>
   * <li>Two children representing the two snapshottable directories</li>
   * <li>A list of files for testing, so that we can check snapshots
   * after file creation/deletion/modification.</li>
   * <li>A list of non-snapshottable directories, to test snapshots with
   * directory creation/deletion. Note that this is needed because the
   * deletion of a snapshottable directory with snapshots is not allowed.</li>
   * </ul>
   */
  static class Node {
    /** The level of this node in the directory tree */
    final int level;
    /** Children */
    Node leftChild;
    Node rightChild;
    /** Parent node of the node */
    final Node parent;
    /** File path of the node */
    final Path nodePath;
    /**
     * The file path list for testing snapshots before/after file
     * creation/deletion/modification
     */
    ArrayList<Path> fileList;
    /**
     * Each time for testing snapshots with file creation, since we do not
     * want to insert new files into the fileList, we always create the file
     * that was deleted last time. Thus we record the index for deleted file
     * in the fileList, and roll the file modification forward in the list.
     */
    int nullFileIndex = 0;
    /**
     * A list of non-snapshottable directories for testing snapshots with
     * directory creation/deletion
     */
    final ArrayList<Node> nonSnapshotChildren;
    // Creates the directory for this node on the given FileSystem.
    Node(Path path, int level, Node parent,
        FileSystem fs) throws Exception {
      this.nodePath = path;
      this.level = level;
      this.parent = parent;
      this.nonSnapshotChildren = new ArrayList<Node>();
      fs.mkdirs(nodePath);
    }
    /**
     * Create files and add them in the fileList. Initially the last element
     * in the fileList is set to null (where we start file creation).
     * Note: the last path is added to fileList but NOT created on disk —
     * it is the initial "null" slot tracked by {@link #nullFileIndex}.
     */
    void initFileList(FileSystem fs, String namePrefix, long fileLen,
        short replication, long seed, int numFiles) throws Exception {
      fileList = new ArrayList<Path>(numFiles);
      for (int i = 0; i < numFiles; i++) {
        Path file = new Path(nodePath, namePrefix + "-f" + i);
        fileList.add(file);
        if (i < numFiles - 1) {
          DFSTestUtil.createFile(fs, file, fileLen, replication, seed);
        }
      }
      nullFileIndex = numFiles - 1;
    }
    // Equality is based solely on the node's path.
    @Override
    public boolean equals(Object o) {
      if (o != null && o instanceof Node) {
        Node node = (Node) o;
        return node.nodePath.equals(nodePath);
      }
      return false;
    }
    // Consistent with equals(): hash of the node's path.
    @Override
    public int hashCode() {
      return nodePath.hashCode();
    }
  }
}
/**
 * Print {@code message} followed by a recursive dump of the whole namespace
 * tree (rooted at "/") of the cluster's NameNode to System.out.
 *
 * @param message a label printed before the tree dump
 * @param cluster the MiniDFSCluster whose namespace is dumped
 */
public static void dumpTree(String message, MiniDFSCluster cluster
    ) throws UnresolvedLinkException {
  System.out.println("XXX " + message);
  final FSDirectory fsdir =
      cluster.getNameNode().getNamesystem().getFSDirectory();
  fsdir.getINode("/").dumpTreeRecursively(System.out);
}
}